//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

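// Sentinel layer index: values equal to this are exempt from the per-layer bounds check in CheckLayers below.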
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

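// The parser function table is indexed by the flatbuffer Layer enum: every entry starts out as
// ParseUnsupportedLayer and the constructor then overrides the entries for the layer types handled here.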
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &Deserializer::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &Deserializer::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &Deserializer::ParseRank;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &Deserializer::ParseTranspose;
}

Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if ( layerType == Layer::Layer_OutputLayer )
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        float quantizationScale = tensorPtr->quantizationScale();
        int32_t quantizationOffset = tensorPtr->quantizationOffset();

        return armnn::TensorInfo(armnn::TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

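    // Per-axis quantization: if a list of scales is present, build a TensorInfo that carries one
    // scale per entry along the quantization dimension; otherwise fall back to the single
    // scale/offset pair below.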
    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             constTensorPtr->data_type(),
                                             EnumNameConstTensorData(constTensorPtr->data_type()),
                                             location.AsString()));
        }
    }
}

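// For each input slot of the given layer, looks up the layer that feeds it and returns that
// producing layer's first output slot tensor info.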
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
                                     "layerName: {1} / {2}",
                                     layerIndex,
                                     layerName,
                                     CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

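// A minimal usage sketch of the deserializer entry points defined in this file (illustrative only;
// the file name and layer names are assumptions made for the example):
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream file("model.armnn", std::ios::binary);
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//     auto inputInfo  = parser->GetNetworkInputBindingInfo(0, "input");
//     auto outputInfo = parser->GetNetworkOutputBindingInfo(0, "output");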
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
                                                   CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
                                         "flatbuffers format. size:{0} {1}",
                                         len,
                                         CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
{
    Deserializer::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
    }

    return versions;
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
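        // Binding id scheme 0 stores layer indices in inputIds(); later schemes store the layer
        // binding ids themselves, which are resolved back to a layer index here.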
Tee Jungaa920c52019-11-05 10:48:25 +0000857 unsigned int inputLayerIndex = 0xFFFFFFFF;
858 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
859 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100860 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000861 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
862 }
863 else
864 {
865 const int inputId = graph->inputIds()->Get(i);
866 inputLayerIndex = GetInputLayerInVector(graph, inputId);
867 }
868
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100869 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000870
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100871 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
872 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100873 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000874
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100875 IConnectableLayer* inputLayer =
876 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000877
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100878 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
879 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
880 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
881
Derek Lamberti8ddae332019-02-21 16:29:43 +0000882 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100883 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000884 }
885}
886
Derek Lamberti8ddae332019-02-21 16:29:43 +0000887void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000888{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000889 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100890 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000891 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100892 m_OutputBindings.reserve(numOutputs);
893
894 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000895 {
Tee Jungaa920c52019-11-05 10:48:25 +0000896 unsigned int outputLayerIndex = 0xFFFFFFFF;
897 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
898 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100899 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000900 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
901 }
902 else
903 {
904 const int outputId = graph->outputIds()->Get(i);
905 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
906 }
907
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100908 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000909
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100910 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
911 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Josh Minor4c10dfc2020-06-17 13:56:20 -0500912 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000913
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100914 IConnectableLayer* outputLayer =
915 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000916
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100917 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
918
919 unsigned int sourceLayerIndex =
920 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
921 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Josh Minor4c10dfc2020-06-17 13:56:20 -0500922 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(i)->tensorInfo());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100923
Derek Lamberti8ddae332019-02-21 16:29:43 +0000924 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100925 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000926 }
927}
928
Derek Lamberti8ddae332019-02-21 16:29:43 +0000929void Deserializer::RegisterOutputSlots(GraphPtr graph,
930 uint32_t layerIndex,
931 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000932{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000933 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100934 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100935 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
936 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000937 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100938 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
939 " for layer index: {2} {3}",
940 baseLayer->outputSlots()->size(),
941 layer->GetNumOutputSlots(),
942 layerIndex,
943 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000944 }
945
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100946 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000947 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100948 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
949 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
950 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
951 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000952 }
953}
954
Derek Lamberti8ddae332019-02-21 16:29:43 +0000955void Deserializer::RegisterInputSlots(GraphPtr graph,
956 uint32_t layerIndex,
957 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000958{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000959 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100960 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100961 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
962 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000963 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100964 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
965 " for layer index:{2} {3}",
966 baseLayer->inputSlots()->size(),
967 layer->GetNumInputSlots(),
968 layerIndex,
969 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000970 }
971
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100972 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000973 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100974 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
975 auto fbConnection = fbInputSlot->connection();
976 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
977 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000978 }
979}
980
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000981void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
982 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100983 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000984{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100985 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000986 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100987 m_GraphConnections[sourceLayerIndex] = Connections();
988 }
989
990 Connections& connections = m_GraphConnections[sourceLayerIndex];
991 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
992 {
993 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000994 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000995 else
996 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100997 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000998 }
999}
Kevin May43a799c2019-02-08 16:31:42 +00001000
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001001void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001002 uint32_t outputSlotIndex,
1003 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001004{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001005 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1006 {
1007 m_GraphConnections[sourceLayerIndex] = Connections();
1008 }
1009
1010 Connections& connections = m_GraphConnections[sourceLayerIndex];
1011 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1012 {
1013 throw ParseException("Same output slot index processed twice");
1014 }
1015
1016 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001017}
1018
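// Abs has no dedicated layer type on deserialization; it is mapped onto an ElementwiseUnary layer
// using UnaryOperation::Abs.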
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001019void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
1020{
1021 CHECK_LAYERS(graph, 0, layerIndex);
1022 auto inputs = GetInputs(graph, layerIndex);
1023 CHECK_LOCATION();
1024 CHECK_VALID_SIZE(inputs.size(), 1);
1025
1026 auto outputs = GetOutputs(graph, layerIndex);
1027 CHECK_VALID_SIZE(outputs.size(), 1);
1028
1029 auto layerName = GetLayerName(graph, layerIndex);
1030
josh minor4a3c6102020-01-06 16:40:46 -06001031 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1032 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001033 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1034 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1035
1036 RegisterInputSlots(graph, layerIndex, layer);
1037 RegisterOutputSlots(graph, layerIndex, layer);
1038}
1039
Derek Lamberti8ddae332019-02-21 16:29:43 +00001040void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001041{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001042 CHECK_LAYERS(graph, 0, layerIndex);
1043 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001044 CHECK_LOCATION();
1045 CHECK_VALID_SIZE(inputs.size(), 1);
1046
Derek Lamberti8ddae332019-02-21 16:29:43 +00001047 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001048 CHECK_VALID_SIZE(outputs.size(), 1);
1049
Derek Lamberti8ddae332019-02-21 16:29:43 +00001050 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001051 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001052 auto serializerDescriptor = serializerLayer->descriptor();
1053
1054 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001055 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001056 descriptor.m_A = serializerDescriptor->a();
1057 descriptor.m_B = serializerDescriptor->b();
1058
1059 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1060 layerName.c_str());
1061 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1062 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1063
Derek Lamberti8ddae332019-02-21 16:29:43 +00001064 RegisterInputSlots(graph, layerIndex, layer);
1065 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001066}
1067
Derek Lamberti8ddae332019-02-21 16:29:43 +00001068void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001069{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001070 CHECK_LAYERS(graph, 0, layerIndex);
1071 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001072 CHECK_LOCATION();
1073 CHECK_VALID_SIZE(inputs.size(), 2);
1074
Derek Lamberti8ddae332019-02-21 16:29:43 +00001075 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001076 CHECK_VALID_SIZE(outputs.size(), 1);
1077
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001078 auto layerName = GetLayerName(graph, layerIndex);
1079 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001080
1081 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1082 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1083
Derek Lamberti8ddae332019-02-21 16:29:43 +00001084 RegisterInputSlots(graph, layerIndex, layer);
1085 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001086}
1087
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001088void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
1089{
1090 CHECK_LAYERS(graph, 0, layerIndex);
1091 auto inputs = GetInputs(graph, layerIndex);
1092 CHECK_LOCATION();
1093 CHECK_VALID_SIZE(inputs.size(), 1);
1094
1095 auto outputs = GetOutputs(graph, layerIndex);
1096 CHECK_VALID_SIZE(outputs.size(), 1);
1097
1098 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1099 auto serializerDescriptor = serializerLayer->descriptor();
1100
1101 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001102 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001103 descriptor.m_Axis = serializerDescriptor->axis();
1104 auto layerName = GetLayerName(graph, layerIndex);
1105 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1106
1107 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1108 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1109
1110 RegisterInputSlots(graph, layerIndex, layer);
1111 RegisterOutputSlots(graph, layerIndex, layer);
1112}
1113
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001114void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1115{
1116 CHECK_LAYERS(graph, 0, layerIndex);
1117
1118 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1119 CHECK_VALID_SIZE(inputs.size(), 1);
1120
1121 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1122 CHECK_VALID_SIZE(outputs.size(), 1);
1123
1124 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1125 auto flatBufferCrops = flatBufferDescriptor->crops();
1126 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1127
1128 if (flatBufferCrops->Length() % 2 != 0)
1129 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001130 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001131 }
1132
1133 std::vector<std::pair<unsigned int, unsigned int>> crops;
1134 crops.reserve(flatBufferCrops->Length() / 2);
1135 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1136 {
1137 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1138 }
1139
1140 armnn::BatchToSpaceNdDescriptor descriptor;
1141 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1142 descriptor.m_BlockShape =
1143 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1144 descriptor.m_Crops = crops;
1145
1146 auto layerName = GetLayerName(graph, layerIndex);
1147 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1148
1149 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1150 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1151
1152 RegisterInputSlots(graph, layerIndex, layer);
1153 RegisterOutputSlots(graph, layerIndex, layer);
1154}
1155
ruoyan018e7fa232019-02-28 15:09:07 +00001156void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1157{
1158 CHECK_LAYERS(graph, 0, layerIndex);
1159
1160 auto inputs = GetInputs(graph, layerIndex);
1161 CHECK_VALID_SIZE(inputs.size(), 1);
1162
1163 auto outputs = GetOutputs(graph, layerIndex);
1164 CHECK_VALID_SIZE(outputs.size(), 1);
1165 auto outputInfo = ToTensorInfo(outputs[0]);
1166
ruoyan015c7ab052019-03-04 14:48:02 +00001167 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001168
1169 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1170 auto serializerDescriptor = serializerLayer->descriptor();
1171
1172 armnn::BatchNormalizationDescriptor descriptor;
1173 descriptor.m_Eps = serializerDescriptor->eps();
1174 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1175
1176 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1177 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1178 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1179 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1180
1181 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1182 mean,
1183 variance,
1184 beta,
1185 gamma,
1186 layerName.c_str());
1187 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1188
1189 RegisterInputSlots(graph, layerIndex, layer);
1190 RegisterOutputSlots(graph, layerIndex, layer);
1191}
1192
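// Constant layers have no inputs, so only the output slots are registered below.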
Conor Kennedy76277882019-02-26 08:29:54 +00001193void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1194{
1195 CHECK_LAYERS(graph, 0, layerIndex);
1196 CHECK_LOCATION();
1197
1198 auto outputs = GetOutputs(graph, layerIndex);
1199 CHECK_VALID_SIZE(outputs.size(), 1);
1200
1201 auto layerName = GetLayerName(graph, layerIndex);
1202
1203 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1204 auto serializerInput = serializerLayer->input();
1205
1206 armnn::ConstTensor input = ToConstTensor(serializerInput);
1207
1208 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1209
1210 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1211 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1212
1213 RegisterOutputSlots(graph, layerIndex, layer);
1214}
1215
Derek Lamberti8ddae332019-02-21 16:29:43 +00001216void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001217{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001218 CHECK_LAYERS(graph, 0, layerIndex);
1219 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001220 CHECK_LOCATION();
1221 CHECK_VALID_SIZE(inputs.size(), 1);
1222
Derek Lamberti8ddae332019-02-21 16:29:43 +00001223 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001224 CHECK_VALID_SIZE(outputs.size(), 1);
1225
Derek Lamberti8ddae332019-02-21 16:29:43 +00001226 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001227 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001228 auto serializerDescriptor = serializerLayer->descriptor();
1229
1230 armnn::Convolution2dDescriptor descriptor;
1231 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1232 descriptor.m_PadRight = serializerDescriptor->padRight();
1233 descriptor.m_PadTop = serializerDescriptor->padTop();
1234 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1235 descriptor.m_StrideX = serializerDescriptor->strideX();
1236 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001237 descriptor.m_DilationX = serializerDescriptor->dilationX();
1238 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001239 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1240 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1241
1242 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1243 armnn::ConstTensor biases;
1244
Matteo Martincighfc598e12019-05-14 10:36:13 +01001245 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001246 if (descriptor.m_BiasEnabled)
1247 {
1248 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001249 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001250 }
1251 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1252 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001253 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001254 layerName.c_str());
1255 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1256 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1257
Derek Lamberti8ddae332019-02-21 16:29:43 +00001258 RegisterInputSlots(graph, layerIndex, layer);
1259 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001260}
1261
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001262void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1263{
1264 CHECK_LAYERS(graph, 0, layerIndex);
1265
1266 auto inputs = GetInputs(graph, layerIndex);
1267 CHECK_VALID_SIZE(inputs.size(), 1);
1268
1269 auto outputs = GetOutputs(graph, layerIndex);
1270 CHECK_VALID_SIZE(outputs.size(), 1);
1271
1272 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1273
1274 armnn::DepthToSpaceDescriptor descriptor;
1275 descriptor.m_BlockSize = fbDescriptor->blockSize();
1276 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1277
1278 auto layerName = GetLayerName(graph, layerIndex);
1279 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1280
1281 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1282 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1283
1284 RegisterInputSlots(graph, layerIndex, layer);
1285 RegisterOutputSlots(graph, layerIndex, layer);
1286}
1287
Derek Lamberti8ddae332019-02-21 16:29:43 +00001288void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001289{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001290 CHECK_LAYERS(graph, 0, layerIndex);
1291 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001292 CHECK_LOCATION();
1293 CHECK_VALID_SIZE(inputs.size(), 1);
1294
Derek Lamberti8ddae332019-02-21 16:29:43 +00001295 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001296 CHECK_VALID_SIZE(outputs.size(), 1);
1297
Derek Lamberti8ddae332019-02-21 16:29:43 +00001298 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001299 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001300 auto serializerDescriptor = serializerLayer->descriptor();
1301
1302 armnn::DepthwiseConvolution2dDescriptor descriptor;
1303 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1304 descriptor.m_PadRight = serializerDescriptor->padRight();
1305 descriptor.m_PadTop = serializerDescriptor->padTop();
1306 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1307 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001308 descriptor.m_StrideY = serializerDescriptor->strideY();
1309 descriptor.m_DilationX = serializerDescriptor->dilationX();
1310 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001311 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1312 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1313
1314 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1315 armnn::ConstTensor biases;
1316
Matteo Martincighfc598e12019-05-14 10:36:13 +01001317 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001318 if (descriptor.m_BiasEnabled)
1319 {
1320 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001321 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001322 }
1323 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1324 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001325 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001326 layerName.c_str());
1327
1328 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1329 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1330
Derek Lamberti8ddae332019-02-21 16:29:43 +00001331 RegisterInputSlots(graph, layerIndex, layer);
1332 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001333}
1334
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001335void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1336{
1337 CHECK_LAYERS(graph, 0, layerIndex);
1338 auto inputs = GetInputs(graph, layerIndex);
1339 CHECK_LOCATION();
1340 CHECK_VALID_SIZE(inputs.size(), 2);
1341
1342 auto outputs = GetOutputs(graph, layerIndex);
1343 CHECK_VALID_SIZE(outputs.size(), 4);
1344
1345 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1346 auto layerName = GetLayerName(graph, layerIndex);
1347 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1348
1349 armnn::DetectionPostProcessDescriptor descriptor;
1350 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1351 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1352 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1353 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1354 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1355 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1356 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1357 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1358 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1359 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1360 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1361
1362 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1363
1364 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1365 anchors,
1366 layerName.c_str());
1367
1368 for (unsigned int i = 0; i < 4; i++)
1369 {
1370 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1371 }
1372
1373 RegisterInputSlots(graph, layerIndex, layer);
1374 RegisterOutputSlots(graph, layerIndex, layer);
1375}
1376
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001377void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1378{
1379 CHECK_LAYERS(graph, 0, layerIndex);
1380 auto inputs = GetInputs(graph, layerIndex);
1381 CHECK_LOCATION();
1382 CHECK_VALID_SIZE(inputs.size(), 2);
1383
1384 auto outputs = GetOutputs(graph, layerIndex);
1385 CHECK_VALID_SIZE(outputs.size(), 1);
1386
1387 auto layerName = GetLayerName(graph, layerIndex);
1388 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1389
1390 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1391 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1392
1393 RegisterInputSlots(graph, layerIndex, layer);
1394 RegisterOutputSlots(graph, layerIndex, layer);
1395}
1396
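// The standalone Equal layer type is mapped onto a Comparison layer with ComparisonOperation::Equal.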
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001397void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1398{
1399 CHECK_LAYERS(graph, 0, layerIndex);
1400 auto inputs = GetInputs(graph, layerIndex);
1401 CHECK_LOCATION();
1402 CHECK_VALID_SIZE(inputs.size(), 2);
1403
1404 auto outputs = GetOutputs(graph, layerIndex);
1405 CHECK_VALID_SIZE(outputs.size(), 1);
1406
1407 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001408 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1409 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001410
1411 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1412 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1413
1414 RegisterInputSlots(graph, layerIndex, layer);
1415 RegisterOutputSlots(graph, layerIndex, layer);
1416}
1417
Keith Davis300ad562020-06-04 16:34:23 +01001418void Deserializer::ParseFill(GraphPtr graph, unsigned int layerIndex)
1419{
1420 CHECK_LAYERS(graph, 0, layerIndex);
1421 auto inputs = GetInputs(graph, layerIndex);
1422 CHECK_LOCATION();
1423 CHECK_VALID_SIZE(inputs.size(), 1);
1424
1425 auto outputs = GetOutputs(graph, layerIndex);
1426 CHECK_VALID_SIZE(outputs.size(), 1);
1427
1428 auto layerName = GetLayerName(graph, layerIndex);
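    // Note: the fill value is fixed at 1.0f here; it is not read from the serialized descriptor.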
1429 armnn::FillDescriptor descriptor(1.0f);
1430 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1431
1432 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1433 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1434
1435 RegisterInputSlots(graph, layerIndex, layer);
1436 RegisterOutputSlots(graph, layerIndex, layer);
1437}
1438
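// The standalone Greater layer type is mapped onto a Comparison layer with ComparisonOperation::Greater.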
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001439void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1440{
1441 CHECK_LAYERS(graph, 0, layerIndex);
1442 auto inputs = GetInputs(graph, layerIndex);
1443 CHECK_LOCATION();
1444 CHECK_VALID_SIZE(inputs.size(), 2);
1445
1446 auto outputs = GetOutputs(graph, layerIndex);
1447 CHECK_VALID_SIZE(outputs.size(), 1);
1448
1449 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001450 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1451 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001452
1453 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1454 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1455
1456 RegisterInputSlots(graph, layerIndex, layer);
1457 RegisterOutputSlots(graph, layerIndex, layer);
1458}
1459
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001460void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1461{
1462 CHECK_LAYERS(graph, 0, layerIndex);
1463
1464 auto inputs = GetInputs(graph, layerIndex);
1465 CHECK_VALID_SIZE(inputs.size(), 1);
1466
1467 auto outputs = GetOutputs(graph, layerIndex);
1468 CHECK_VALID_SIZE(outputs.size(), 1);
1469
1470 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1471 auto fbDescriptor = fbLayer->descriptor();
1472
1473 armnn::InstanceNormalizationDescriptor descriptor;
1474 descriptor.m_Gamma = fbDescriptor->gamma();
1475 descriptor.m_Beta = fbDescriptor->beta();
1476 descriptor.m_Eps = fbDescriptor->eps();
1477 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1478
1479 const std::string layerName = GetLayerName(graph, layerIndex);
1480 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1481
1482 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1483 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1484
1485 RegisterInputSlots(graph, layerIndex, layer);
1486 RegisterOutputSlots(graph, layerIndex, layer);
1487}
1488
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001489void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1490{
1491 CHECK_LAYERS(graph, 0, layerIndex);
1492
1493 auto inputs = GetInputs(graph, layerIndex);
1494 CHECK_VALID_SIZE(inputs.size(), 1);
1495
1496 auto outputs = GetOutputs(graph, layerIndex);
1497 CHECK_VALID_SIZE(outputs.size(), 1);
1498 auto outputInfo = ToTensorInfo(outputs[0]);
1499
1500 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1501 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1502
1503 auto layerName = GetLayerName(graph, layerIndex);
1504 armnn::L2NormalizationDescriptor descriptor;
1505 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001506 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001507
1508 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1509 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1510
1511 RegisterInputSlots(graph, layerIndex, layer);
1512 RegisterOutputSlots(graph, layerIndex, layer);
1513}
1514
Sadik Armagan26257852019-10-14 13:00:47 +01001515void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1516{
1517 CHECK_LAYERS(graph, 0, layerIndex);
1518
1519 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1520 CHECK_VALID_SIZE(inputs.size(), 1);
1521
1522 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1523 CHECK_VALID_SIZE(outputs.size(), 1);
1524
1525 armnn::LogSoftmaxDescriptor descriptor;
1526 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1527 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1528 auto layerName = GetLayerName(graph, layerIndex);
1529
1530 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1531
1532 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1533 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1534
1535 RegisterInputSlots(graph, layerIndex, layer);
1536 RegisterOutputSlots(graph, layerIndex, layer);
1537}
1538
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001539void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1540{
1541 CHECK_LAYERS(graph, 0, layerIndex);
1542 auto inputs = GetInputs(graph, layerIndex);
1543 CHECK_LOCATION();
1544 CHECK_VALID_SIZE(inputs.size(), 2);
1545
1546 auto outputs = GetOutputs(graph, layerIndex);
1547 CHECK_VALID_SIZE(outputs.size(), 1);
1548
1549 auto layerName = GetLayerName(graph, layerIndex);
1550 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1551
1552 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1553 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1554
1555 RegisterInputSlots(graph, layerIndex, layer);
1556 RegisterOutputSlots(graph, layerIndex, layer);
1557}
1558
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001559void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1560{
1561 CHECK_LAYERS(graph, 0, layerIndex);
1562 auto inputs = GetInputs(graph, layerIndex);
1563 CHECK_LOCATION();
1564 CHECK_VALID_SIZE(inputs.size(), 2);
1565
1566 auto outputs = GetOutputs(graph, layerIndex);
1567 CHECK_VALID_SIZE(outputs.size(), 1);
1568
1569 auto layerName = GetLayerName(graph, layerIndex);
1570 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1571
1572 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1573 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1574
1575 RegisterInputSlots(graph, layerIndex, layer);
1576 RegisterOutputSlots(graph, layerIndex, layer);
1577}
1578
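// Concat may be serialized either as a ConcatLayer or as the legacy MergerLayer; both carry an
// OriginsDescriptor, so this helper returns whichever one is present.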
Jim Flynne242f2d2019-05-22 14:24:13 +01001579const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1580 unsigned int layerIndex)
1581{
1582 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1583
1584 switch (layerType)
1585 {
1586 case Layer::Layer_ConcatLayer:
1587 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1588 case Layer::Layer_MergerLayer:
1589 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1590 default:
1591 throw armnn::Exception("unknown layer type, should be concat or merger");
1592 }
1593}
1594
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001595void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1596{
1597 CHECK_LAYERS(graph, 0, layerIndex);
1598 CHECK_LOCATION();
1599
1600 auto inputs = GetInputs(graph, layerIndex);
1601 CHECK_VALID_SIZE(inputs.size(), 2);
1602
1603 auto outputs = GetOutputs(graph, layerIndex);
1604 CHECK_VALID_SIZE(outputs.size(), 1);
1605
1606 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1607 auto fbDescriptor = fbLayer->descriptor();
1608
1609 armnn::ComparisonDescriptor descriptor;
1610 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1611
1612 const std::string& layerName = GetLayerName(graph, layerIndex);
1613 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1614
1615 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1616 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1617
1618 RegisterInputSlots(graph, layerIndex, layer);
1619 RegisterOutputSlots(graph, layerIndex, layer);
1620}
1621
josh minor4a3c6102020-01-06 16:40:46 -06001622void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1623{
1624 CHECK_LAYERS(graph, 0, layerIndex);
1625 CHECK_LOCATION();
1626
1627 auto inputs = GetInputs(graph, layerIndex);
1628 CHECK_VALID_SIZE(inputs.size(), 1);
1629
1630 auto outputs = GetOutputs(graph, layerIndex);
1631 CHECK_VALID_SIZE(outputs.size(), 1);
1632
1633 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1634 auto fbDescriptor = fbLayer->descriptor();
1635
1636 armnn::ElementwiseUnaryDescriptor descriptor;
1637 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1638
1639 const std::string& layerName = GetLayerName(graph, layerIndex);
1640 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1641
1642 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1643 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1644
1645 RegisterInputSlots(graph, layerIndex, layer);
1646 RegisterOutputSlots(graph, layerIndex, layer);
1647}
1648
Jim Flynn906f9462019-05-10 13:55:21 +01001649void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001650{
1651 CHECK_LAYERS(graph, 0, layerIndex);
1652 CHECK_LOCATION();
1653
1654 auto outputs = GetOutputs(graph, layerIndex);
1655 CHECK_VALID_SIZE(outputs.size(), 1);
1656
Jim Flynnac25a1b2019-02-28 10:40:49 +00001657 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001658 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1659 unsigned int numViews = originsDescriptor->numViews();
1660 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001661
1662 // can now check the number of inputs == number of views
1663 auto inputs = GetInputs(graph, layerIndex);
1664 CHECK_VALID_SIZE(inputs.size(), numViews);
1665
1666 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001667 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001668 for (unsigned int v = 0; v < numViews; ++v)
1669 {
1670 auto originPtr = originsPtr->Get(v);
1671 for (unsigned int d = 0; d < numDimensions; ++d)
1672 {
1673 uint32_t value = originPtr->data()->Get(d);
1674 descriptor.SetViewOriginCoord(v, d, value);
1675 }
1676 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001677 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001678
Jim Flynn906f9462019-05-10 13:55:21 +01001679 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001680 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1681 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1682
1683 RegisterInputSlots(graph, layerIndex, layer);
1684 RegisterOutputSlots(graph, layerIndex, layer);
1685}
1686
Derek Lamberti8ddae332019-02-21 16:29:43 +00001687void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001688{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001689 CHECK_LAYERS(graph, 0, layerIndex);
1690 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001691 CHECK_LOCATION();
1692 CHECK_VALID_SIZE(inputs.size(), 2);
1693
Derek Lamberti8ddae332019-02-21 16:29:43 +00001694 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001695 CHECK_VALID_SIZE(outputs.size(), 1);
1696
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001697 auto layerName = GetLayerName(graph, layerIndex);
1698 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001699
1700 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1701 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1702
Derek Lamberti8ddae332019-02-21 16:29:43 +00001703 RegisterInputSlots(graph, layerIndex, layer);
1704 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001705}
1706
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001707void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1708{
1709 CHECK_LAYERS(graph, 0, layerIndex);
1710 CHECK_LOCATION();
1711
1712 auto inputs = GetInputs(graph, layerIndex);
1713 CHECK_VALID_SIZE(inputs.size(), 1);
1714
1715 auto outputs = GetOutputs(graph, layerIndex);
1716 CHECK_VALID_SIZE(outputs.size(), 1);
1717
1718 auto layerName = GetLayerName(graph, layerIndex);
1719
1720 armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001723
1724 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1725 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1726
1727 RegisterInputSlots(graph, layerIndex, layer);
1728 RegisterOutputSlots(graph, layerIndex, layer);
1729}
1730
Derek Lamberti8ddae332019-02-21 16:29:43 +00001731void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001732{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001733 CHECK_LAYERS(graph, 0, layerIndex);
1734 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001735 CHECK_LOCATION();
1736 CHECK_VALID_SIZE(inputs.size(), 1);
1737
Derek Lamberti8ddae332019-02-21 16:29:43 +00001738 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001739 CHECK_VALID_SIZE(outputs.size(), 1);
1740
Derek Lamberti8ddae332019-02-21 16:29:43 +00001741 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001742 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001743 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1744
1745 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1746 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1747 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1748
1749 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1750
1751 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001752 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001753 if (flatBufferDescriptor->biasEnabled())
1754 {
1755 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001756 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001757 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001758 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1759 weightsTensor,
1760 optionalBiases,
1761 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001762
1763 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1764 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1765
Derek Lamberti8ddae332019-02-21 16:29:43 +00001766 RegisterInputSlots(graph, layerIndex, layer);
1767 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001768}
1769
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001770void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1771{
1772 CHECK_LAYERS(graph, 0, layerIndex);
1773
1774 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1775 CHECK_VALID_SIZE(inputs.size(), 1);
1776
1777 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1778 CHECK_VALID_SIZE(outputs.size(), 1);
1779
1780 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1781 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001782 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001783
1784 if (flatBufferPadList->Length() % 2 != 0)
1785 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001786 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1787 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001788 }
1789
1790 std::vector<std::pair<unsigned int, unsigned int>> padList;
1791 padList.reserve(flatBufferPadList->Length() / 2);
1792 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1793 {
1794 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1795 }
1796
David Monahan34757812019-06-19 11:47:21 +01001797 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001798
1799 auto layerName = GetLayerName(graph, layerIndex);
1800 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1801
1802 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1803 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1804
1805 RegisterInputSlots(graph, layerIndex, layer);
1806 RegisterOutputSlots(graph, layerIndex, layer);
1807}
1808
Derek Lamberti8ddae332019-02-21 16:29:43 +00001809void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001810{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001811 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001812
1813 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001814 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001815
Derek Lamberti8ddae332019-02-21 16:29:43 +00001816 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001817 CHECK_VALID_SIZE(inputs.size(), 1);
1818
Derek Lamberti8ddae332019-02-21 16:29:43 +00001819 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001820 CHECK_VALID_SIZE(outputs.size(), 1);
1821 auto outputInfo = ToTensorInfo(outputs[0]);
1822
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001823 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001824 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1825
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001826 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001827 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1828
Derek Lamberti8ddae332019-02-21 16:29:43 +00001829 RegisterInputSlots(graph, layerIndex, layer);
1830 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001831}
1832
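// Translates the serialized pooling descriptor (pool type, output shape rounding, padding method,
// data layout, padding, strides and pool size) into an armnn::Pooling2dDescriptor.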
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001833armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001834 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001835{
Jan Eilers8eb25602020-03-09 12:13:48 +00001836 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001837 armnn::Pooling2dDescriptor desc;
1838
1839 switch (pooling2dDesc->poolType())
1840 {
1841 case PoolingAlgorithm_Average:
1842 {
1843 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001844 break;
1845 }
1846 case PoolingAlgorithm_Max:
1847 {
1848 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001849 break;
1850 }
1851 default:
1852 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001853 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001854 }
1855 }
1856
1857 switch (pooling2dDesc->outputShapeRounding())
1858 {
1859 case OutputShapeRounding_Floor:
1860 {
1861 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1862 break;
1863 }
1864 case OutputShapeRounding_Ceiling:
1865 {
1866 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1867 break;
1868 }
1869 default:
1870 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001871 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001872 }
1873 }
1874
1875 switch (pooling2dDesc->paddingMethod())
1876 {
1877 case PaddingMethod_Exclude:
1878 {
1879 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1880 break;
1881 }
1882 case PaddingMethod_IgnoreValue:
1883 {
1884 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1885 break;
1886 }
1887 default:
1888 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001889 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001890 }
1891 }
1892
1893 switch (pooling2dDesc->dataLayout())
1894 {
1895 case DataLayout_NCHW:
1896 {
1897 desc.m_DataLayout = armnn::DataLayout::NCHW;
1898 break;
1899 }
1900 case DataLayout_NHWC:
1901 {
1902 desc.m_DataLayout = armnn::DataLayout::NHWC;
1903 break;
1904 }
1905 default:
1906 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001907 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001908 }
1909 }
1910
1911 desc.m_PadRight = pooling2dDesc->padRight();
1912 desc.m_PadLeft = pooling2dDesc->padLeft();
1913 desc.m_PadBottom = pooling2dDesc->padBottom();
1914 desc.m_PadTop = pooling2dDesc->padTop();
1915 desc.m_StrideX = pooling2dDesc->strideX();
1916 desc.m_StrideY = pooling2dDesc->strideY();
1917 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1918 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1919
1920 return desc;
1921}
1922
Derek Lamberti8ddae332019-02-21 16:29:43 +00001923void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001924{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001925 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001926
Derek Lamberti8ddae332019-02-21 16:29:43 +00001927 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001928 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001929 CHECK_VALID_SIZE(inputs.size(), 1);
1930
Derek Lamberti8ddae332019-02-21 16:29:43 +00001931 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001932 CHECK_VALID_SIZE(outputs.size(), 1);
1933 auto outputInfo = ToTensorInfo(outputs[0]);
1934
1935 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001936 auto layerName = GetLayerName(graph, layerIndex);
1937 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001938 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1939
Derek Lamberti8ddae332019-02-21 16:29:43 +00001940 RegisterInputSlots(graph, layerIndex, layer);
1941 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001942}
1943
Derek Lamberti87acb272019-03-27 16:51:31 +00001944void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1945{
1946 CHECK_LAYERS(graph, 0, layerIndex);
1947
1948 auto inputs = GetInputs(graph, layerIndex);
1949 CHECK_VALID_SIZE(inputs.size(), 1);
1950
1951 auto outputs = GetOutputs(graph, layerIndex);
1952 CHECK_VALID_SIZE(outputs.size(), 1);
1953 auto outputInfo = ToTensorInfo(outputs[0]);
1954
1955 auto layerName = GetLayerName(graph, layerIndex);
1956 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1957 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1958
1959 RegisterInputSlots(graph, layerIndex, layer);
1960 RegisterOutputSlots(graph, layerIndex, layer);
1961}
1962
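// Computes the output TensorInfo of a Reshape. At most one target dimension may be -1; that
// dimension is inferred from the input element count divided by the product of the known dimensions.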
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001963armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001964 const std::vector<uint32_t>& targetDimsIn)
1965{
1966 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1967 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1968
1969 if (stretchDim != targetDimsIn.end())
1970 {
1971 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1972 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001973 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
1974 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00001975 }
1976
1977 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01001978 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00001979 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1980
1981 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1982 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1983 }
1984
1985 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1986
1987 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1988 reshapeInfo.SetShape(outputShape);
1989
1990 return reshapeInfo;
1991}
1992
Finn Williams2605b232020-06-10 15:53:46 +01001993void Deserializer::ParseRank(GraphPtr graph, unsigned int layerIndex)
1994{
1995 CHECK_LAYERS(graph, 0, layerIndex);
1996
1997 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1998 CHECK_VALID_SIZE(inputs.size(), 1);
1999
2000 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2001 CHECK_VALID_SIZE(outputs.size(), 1);
2002
2003 auto layerName = GetLayerName(graph, layerIndex);
2004 IConnectableLayer* layer = m_Network->AddRankLayer(layerName.c_str());
2005
2006 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2007 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2008
2009 RegisterInputSlots(graph, layerIndex, layer);
2010 RegisterOutputSlots(graph, layerIndex, layer);
2011}
2012
Derek Lamberti8ddae332019-02-21 16:29:43 +00002013void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002014{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002015 CHECK_LAYERS(graph, 0, layerIndex);
2016 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002017
Derek Lamberti8ddae332019-02-21 16:29:43 +00002018 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002019 CHECK_VALID_SIZE(outputs.size(), 1);
2020
2021 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2022 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2023
Derek Lamberti8ddae332019-02-21 16:29:43 +00002024 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002025 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2026
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002027 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002028 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2029
2030 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2031 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2032
2033 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2034 {
2035 std::stringstream ss;
2036 ss << "New shape defined in reshape parameters "
2037 << reshapeOutputTensorShape
2038 << " does not equal output shape "
2039 << actualOutputTensorInfo.GetShape()
2040 << ": "
2041 << CHECK_LOCATION().AsString();
2042 throw ParseException(ss.str());
2043 }
2044
2045 armnn::ReshapeDescriptor reshapeDesc;
2046 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2047
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002048 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002049 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2050 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2051
Derek Lamberti8ddae332019-02-21 16:29:43 +00002052 RegisterInputSlots(graph, layerIndex, layer);
2053 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002054}
2055
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002056void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2057{
2058 CHECK_LAYERS(graph, 0, layerIndex);
2059
2060 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2061 CHECK_VALID_SIZE(inputs.size(), 1);
2062
2063 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2064 CHECK_VALID_SIZE(outputs.size(), 1);
2065
2066 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2067
2068 armnn::ResizeDescriptor descriptor;
2069 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2070 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2071 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2072 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002073 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2074 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002075
2076 auto layerName = GetLayerName(graph, layerIndex);
2077 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2078
2079 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2080 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2081
2082 RegisterInputSlots(graph, layerIndex, layer);
2083 RegisterOutputSlots(graph, layerIndex, layer);
2084}
2085
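// ResizeBilinear is deserialized as a Resize layer with ResizeMethod::Bilinear.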
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002086void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2087{
2088 CHECK_LAYERS(graph, 0, layerIndex);
2089
2090 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2091 CHECK_VALID_SIZE(inputs.size(), 1);
2092
2093 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2094 CHECK_VALID_SIZE(outputs.size(), 1);
2095
2096 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2097
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002098 armnn::ResizeDescriptor descriptor;
2099 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002100 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002101 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2102 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002103 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2104 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002105
2106 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002107 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002108
2109 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2110 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2111
2112 RegisterInputSlots(graph, layerIndex, layer);
2113 RegisterOutputSlots(graph, layerIndex, layer);
2114}
2115
Derek Lamberti8ddae332019-02-21 16:29:43 +00002116void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002117{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002118 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002119
Derek Lamberti8ddae332019-02-21 16:29:43 +00002120 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002121 CHECK_VALID_SIZE(inputs.size(), 1);
2122
Derek Lamberti8ddae332019-02-21 16:29:43 +00002123 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002124 CHECK_VALID_SIZE(outputs.size(), 1);
2125
2126 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002127 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002128 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002129
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002130 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2131
2132 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2133 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2134
Derek Lamberti8ddae332019-02-21 16:29:43 +00002135 RegisterInputSlots(graph, layerIndex, layer);
2136 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002137}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002138
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002139void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
2140{
2141 CHECK_LAYERS(graph, 0, layerIndex);
2142
2143 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2144 CHECK_VALID_SIZE(inputs.size(), 1);
2145
2146 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2147 CHECK_VALID_SIZE(outputs.size(), 1);
2148
2149 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2150 auto flatBufferPadList = flatBufferDescriptor->padList();
2151 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2152
2153 if (flatBufferPadList->Length() % 2 != 0)
2154 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002155 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2156 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002157 }
2158
2159 std::vector<std::pair<unsigned int, unsigned int>> padList;
2160 padList.reserve(flatBufferPadList->Length() / 2);
2161 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2162 {
2163 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2164 }
2165
2166 armnn::SpaceToBatchNdDescriptor descriptor;
2167 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2168 descriptor.m_BlockShape =
2169 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2170 descriptor.m_PadList = padList;
2171
2172 auto layerName = GetLayerName(graph, layerIndex);
2173 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2174
2175 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2176 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2177
2178 RegisterInputSlots(graph, layerIndex, layer);
2179 RegisterOutputSlots(graph, layerIndex, layer);
2180}
2181
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002182void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2183{
2184 CHECK_LAYERS(graph, 0, layerIndex);
2185
2186 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2187 CHECK_VALID_SIZE(inputs.size(), 1);
2188
2189 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2190 CHECK_VALID_SIZE(outputs.size(), 1);
2191
2192 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2193
2194 armnn::SpaceToDepthDescriptor descriptor;
2195 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2196 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2197
2198 auto layerName = GetLayerName(graph, layerIndex);
2199 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2200
2201 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2202 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2203
2204 RegisterInputSlots(graph, layerIndex, layer);
2205 RegisterOutputSlots(graph, layerIndex, layer);
2206}
2207
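// Helper that maps the flatbuffer NormalizationDescriptor onto armnn::NormalizationDescriptor,
// translating the channel type, method type and data layout enums and copying the scalar
// parameters (alpha, beta, k, normSize). Unrecognised enum values trigger an assert.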
Nina Drozd57728782019-02-27 10:53:27 +00002208armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
2209 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
2210 unsigned int layerIndex)
2211{
Jan Eilers8eb25602020-03-09 12:13:48 +00002212 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002213 armnn::NormalizationDescriptor desc;
2214
2215 switch (normalizationDescriptor->normChannelType())
2216 {
2217 case NormalizationAlgorithmChannel_Across:
2218 {
2219 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2220 break;
2221 }
2222 case NormalizationAlgorithmChannel_Within:
2223 {
2224 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2225 break;
2226 }
2227 default:
2228 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002229 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002230 }
2231 }
2232
2233 switch (normalizationDescriptor->normMethodType())
2234 {
2235 case NormalizationAlgorithmMethod_LocalBrightness:
2236 {
2237 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2238 break;
2239 }
2240 case NormalizationAlgorithmMethod_LocalContrast:
2241 {
2242 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2243 break;
2244 }
2245 default:
2246 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002247 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002248 }
2249 }
2250
2251 switch (normalizationDescriptor->dataLayout())
2252 {
2253 case DataLayout_NCHW:
2254 {
2255 desc.m_DataLayout = armnn::DataLayout::NCHW;
2256 break;
2257 }
2258 case DataLayout_NHWC:
2259 {
2260 desc.m_DataLayout = armnn::DataLayout::NHWC;
2261 break;
2262 }
2263 default:
2264 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002265 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002266 }
2267 }
2268
2269 desc.m_Alpha = normalizationDescriptor->alpha();
2270 desc.m_Beta = normalizationDescriptor->beta();
2271 desc.m_K = normalizationDescriptor->k();
2272 desc.m_NormSize = normalizationDescriptor->normSize();
2273
2274 return desc;
2275}
2276
2277void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2278{
2279 CHECK_LAYERS(graph, 0, layerIndex);
2280
2281 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2282
2283 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2284 CHECK_VALID_SIZE(inputs.size(), 1);
2285
2286 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2287 CHECK_VALID_SIZE(outputs.size(), 1);
2288
2289 auto outputInfo = ToTensorInfo(outputs[0]);
2290
2291 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2292 auto layerName = GetLayerName(graph, layerIndex);
2293
2294 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2295 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2296
2297 RegisterInputSlots(graph, layerIndex, layer);
2298 RegisterOutputSlots(graph, layerIndex, layer);
2299}
2300
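// Rsqrt is deserialized onto the generic ElementwiseUnary layer with UnaryOperation::Rsqrt
// rather than a dedicated Rsqrt layer; the distinct layer type remains in the serialized
// format, presumably for compatibility with older models.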
Sadik Armagan8b42a382019-03-01 14:24:49 +00002301void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2302{
2303 CHECK_LAYERS(graph, 0, layerIndex);
2304 auto inputs = GetInputs(graph, layerIndex);
2305 CHECK_LOCATION();
2306 CHECK_VALID_SIZE(inputs.size(), 1);
2307
2308 auto outputs = GetOutputs(graph, layerIndex);
2309 CHECK_VALID_SIZE(outputs.size(), 1);
2310
2311 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002312
josh minor4a3c6102020-01-06 16:40:46 -06002313 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2314 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002315 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2316 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2317
2318 RegisterInputSlots(graph, layerIndex, layer);
2319 RegisterOutputSlots(graph, layerIndex, layer);
2320}
2321
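// Slice: the descriptor stores parallel 'begin' and 'size' arrays (one entry per dimension),
// so their lengths must match before they are copied into armnn::SliceDescriptor.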
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002322void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2323{
2324 CHECK_LAYERS(graph, 0, layerIndex);
2325
2326 auto inputs = GetInputs(graph, layerIndex);
2327 CHECK_VALID_SIZE(inputs.size(), 1);
2328
2329 auto outputs = GetOutputs(graph, layerIndex);
2330 CHECK_VALID_SIZE(outputs.size(), 1);
2331
2332 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2333
2334 auto fbBegin = fbDescriptor->begin();
2335 auto fbSize = fbDescriptor->size();
2336
2337 if (fbBegin->Length() != fbSize->Length())
2338 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002339 throw ParseException(fmt::format("Begin and size descriptors must have the same length. {}",
2340 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002341 }
2342
2343 armnn::SliceDescriptor descriptor;
2344 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2345 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2346
2347 auto layerName = GetLayerName(graph, layerIndex);
2348 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2349
2350 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2351 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2352
2353 RegisterInputSlots(graph, layerIndex, layer);
2354 RegisterOutputSlots(graph, layerIndex, layer);
2355}
2356
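// StridedSlice: begin, end and stride are parallel per-dimension arrays and must all have the
// same length; the bit-mask fields (beginMask, endMask, shrinkAxisMask, ellipsisMask,
// newAxisMask) are copied through unchanged.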
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002357void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2358{
2359 CHECK_LAYERS(graph, 0, layerIndex);
2360
2361 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2362 CHECK_VALID_SIZE(inputs.size(), 1);
2363
2364 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2365 CHECK_VALID_SIZE(outputs.size(), 1);
2366
2367 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2368
2369 auto flatBufferBegin = flatBufferDescriptor->begin();
2370 auto flatBufferEnd = flatBufferDescriptor->end();
2371 auto flatBufferStride = flatBufferDescriptor->stride();
2372
2373 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2374 flatBufferBegin->Length() == flatBufferStride->Length()))
2375 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002376 throw ParseException(fmt::format("The begin, end, and stride lists must all have the same length. {}",
2377 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002378 }
2379
2380 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2381 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2382 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2383
2384 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2385 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2386 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2387 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2388 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2389 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2390 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2391
2392 auto layerName = GetLayerName(graph, layerIndex);
2393 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2394
2395 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2396 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2397
2398 RegisterInputSlots(graph, layerIndex, layer);
2399 RegisterOutputSlots(graph, layerIndex, layer);
2400}
2401
Conor Kennedyda1f9752019-03-01 14:37:12 +00002402void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2403{
2404 CHECK_LAYERS(graph, 0, layerIndex);
2405 auto inputs = GetInputs(graph, layerIndex);
2406 CHECK_LOCATION();
2407 CHECK_VALID_SIZE(inputs.size(), 2);
2408
2409 auto outputs = GetOutputs(graph, layerIndex);
2410 CHECK_VALID_SIZE(outputs.size(), 1);
2411
2412 auto layerName = GetLayerName(graph, layerIndex);
2413 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2414
2415 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2416 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2417
2418 RegisterInputSlots(graph, layerIndex, layer);
2419 RegisterOutputSlots(graph, layerIndex, layer);
2420}
2421
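// Gather: the only descriptor field is the axis along which indices are gathered; the layer
// takes two inputs (data and indices) and produces one output.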
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002422void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2423{
2424 CHECK_LAYERS(graph, 0, layerIndex);
2425
2426 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2427 CHECK_VALID_SIZE(inputs.size(), 2);
2428
2429 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2430 CHECK_VALID_SIZE(outputs.size(), 1);
2431
Teresa Charlin52664732020-06-29 16:27:03 +01002432 armnn::GatherDescriptor descriptor;
2433 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2434
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002435 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002436 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002437
2438 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002439 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2440
2441 RegisterInputSlots(graph, layerIndex, layer);
2442 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002443}
2444
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002445void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2446{
2447 CHECK_LAYERS(graph, 0, layerIndex);
2448
2449 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2450 CHECK_VALID_SIZE(inputs.size(), 1);
2451
2452 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2453 CHECK_VALID_SIZE(outputs.size(), 1);
2454
2455 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2456 auto flatBufferAxis = flatBufferDescriptor->axis();
2457 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2458
2459 armnn::MeanDescriptor descriptor;
2460 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2461 descriptor.m_KeepDims = flatBufferKeepDims;
2462
2463 auto layerName = GetLayerName(graph, layerIndex);
2464 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2465
2466 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2467 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2468
2469 RegisterInputSlots(graph, layerIndex, layer);
2470 RegisterOutputSlots(graph, layerIndex, layer);
2471}
2472
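// Splitter: the serialized ViewsDescriptor is flattened into per-view size and origin arrays;
// it is rebuilt here by setting every (view, dimension) entry individually. The layer exposes
// one output slot per view.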
Jim Flynn18ce3382019-03-08 11:08:30 +00002473void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2474{
2475 CHECK_LAYERS(graph, 0, layerIndex);
2476
2477 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2478 CHECK_VALID_SIZE(inputs.size(), 1);
2479
2480 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2481
2482 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2483 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2484 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2485 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2486 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2487 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2488
2489 // Check that numViews and numDimensions correspond to the ones already serialized ...
2490 // numViews == flatBufferViewSizes.size();
2491 // for each view: numDimensions == flatBufferViewSizes[x].size();
2492
2493 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2494 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2495 {
2496 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2497 {
2498 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2499 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2500 }
2501 }
2502
2503 auto layerName = GetLayerName(graph, layerIndex);
2504 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2505
2506 // The splitter has as many outputs as there are views ...
2507 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2508 {
2509 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2510 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2511 }
2512
2513 RegisterInputSlots(graph, layerIndex, layer);
2514 RegisterOutputSlots(graph, layerIndex, layer);
2515}
2516
Jim Flynn11af3752019-03-19 17:22:29 +00002517armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2518{
2519 armnn::LstmDescriptor desc;
2520
2521 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2522 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2523 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2524 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2525 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2526 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002527 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002528
2529 return desc;
2530}
2531
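// LSTM: only the mandatory gate weights and biases are always present. The CIFG, projection,
// peephole and layer-normalisation parameter groups are read only when the corresponding
// descriptor flag allows them, mirroring how the model was serialized. The ConstTensor locals
// remain in scope until AddLstmLayer() is called, since lstmInputParams only stores pointers.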
2532void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
2533{
2534 CHECK_LAYERS(graph, 0, layerIndex);
2535
2536 auto inputs = GetInputs(graph, layerIndex);
2537 CHECK_VALID_SIZE(inputs.size(), 3);
2538
2539 auto outputs = GetOutputs(graph, layerIndex);
2540 CHECK_VALID_SIZE(outputs.size(), 4);
2541
2542 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2543 auto layerName = GetLayerName(graph, layerIndex);
2544 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2545 auto flatBufferInputParams = flatBufferLayer->inputParams();
2546
2547 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2548
2549 armnn::LstmInputParams lstmInputParams;
2550
2551 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2552 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2553 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2554 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2555 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2556 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2557 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2558 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2559 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2560
2561 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2562 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2563 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2564 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2565 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2566 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2567 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2568 lstmInputParams.m_CellBias = &cellBias;
2569 lstmInputParams.m_OutputGateBias = &outputGateBias;
2570
2571 armnn::ConstTensor inputToInputWeights;
2572 armnn::ConstTensor recurrentToInputWeights;
2573 armnn::ConstTensor cellToInputWeights;
2574 armnn::ConstTensor inputGateBias;
2575 if (!lstmDescriptor.m_CifgEnabled)
2576 {
2577 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2578 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2579 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2580 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2581
2582 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2583 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2584 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2585 lstmInputParams.m_InputGateBias = &inputGateBias;
2586 }
2587
2588 armnn::ConstTensor projectionWeights;
2589 armnn::ConstTensor projectionBias;
2590 if (lstmDescriptor.m_ProjectionEnabled)
2591 {
2592 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2593 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2594
2595 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2596 lstmInputParams.m_ProjectionBias = &projectionBias;
2597 }
2598
2599 armnn::ConstTensor cellToForgetWeights;
2600 armnn::ConstTensor cellToOutputWeights;
2601 if (lstmDescriptor.m_PeepholeEnabled)
2602 {
2603 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2604 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2605
2606 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2607 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2608 }
2609
Jan Eilersf8c62972019-07-17 11:07:49 +01002610 armnn::ConstTensor inputLayerNormWeights;
2611 armnn::ConstTensor forgetLayerNormWeights;
2612 armnn::ConstTensor cellLayerNormWeights;
2613 armnn::ConstTensor outputLayerNormWeights;
2614 if (lstmDescriptor.m_LayerNormEnabled)
2615 {
2616 if (!lstmDescriptor.m_CifgEnabled)
2617 {
2618 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2619 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2620 }
2621 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2622 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2623 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2624
2625 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2626 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2627 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2628 }
2629
Jim Flynn11af3752019-03-19 17:22:29 +00002630 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2631
2632 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2633 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2634
2635 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2636 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2637
2638 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2639 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2640
2641 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2642 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2643
2644 RegisterInputSlots(graph, layerIndex, layer);
2645 RegisterOutputSlots(graph, layerIndex, layer);
2646}
2647
James Conroy8d333182020-05-13 10:27:58 +01002648armnn::QLstmDescriptor Deserializer::GetQLstmDescriptor(Deserializer::QLstmDescriptorPtr qLstmDescriptor)
2649{
2650 armnn::QLstmDescriptor desc;
2651
2652 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2653 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2654 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2655 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2656
2657 desc.m_CellClip = qLstmDescriptor->cellClip();
2658 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2659
2660 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2661 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2662 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2663 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2664
2665 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2666 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2667
2668 return desc;
2669}
2670
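// QLstm: same optional parameter groups as the float LSTM (CIFG, projection, peephole,
// layer normalisation), plus quantization-specific descriptor fields (intermediate scales and
// hidden-state quantization), and three outputs (output state, cell state, output).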
2671void Deserializer::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
2672{
2673 CHECK_LAYERS(graph, 0, layerIndex);
2674
2675 auto inputs = GetInputs(graph, layerIndex);
2676 CHECK_VALID_SIZE(inputs.size(), 3);
2677
2678 auto outputs = GetOutputs(graph, layerIndex);
2679 CHECK_VALID_SIZE(outputs.size(), 3);
2680
2681 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
2682 auto layerName = GetLayerName(graph, layerIndex);
2683 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2684 auto flatBufferInputParams = flatBufferLayer->inputParams();
2685
2686 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
2687 armnn::LstmInputParams qLstmInputParams;
2688
2689 // Mandatory params
2690 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2691 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2692 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2693 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2694 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2695 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2696 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2697 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2698 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2699
2700 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2701 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2702 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2703 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2704 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2705 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2706 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
2707 qLstmInputParams.m_CellBias = &cellBias;
2708 qLstmInputParams.m_OutputGateBias = &outputGateBias;
2709
2710 // Optional CIFG params
2711 armnn::ConstTensor inputToInputWeights;
2712 armnn::ConstTensor recurrentToInputWeights;
2713 armnn::ConstTensor inputGateBias;
2714
2715 if (!qLstmDescriptor.m_CifgEnabled)
2716 {
2717 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2718 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2719 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2720
2721 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2722 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2723 qLstmInputParams.m_InputGateBias = &inputGateBias;
2724 }
2725
2726 // Optional projection params
2727 armnn::ConstTensor projectionWeights;
2728 armnn::ConstTensor projectionBias;
2729
2730 if (qLstmDescriptor.m_ProjectionEnabled)
2731 {
2732 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2733 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2734
2735 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
2736 qLstmInputParams.m_ProjectionBias = &projectionBias;
2737 }
2738
2739 // Optional peephole params
2740 armnn::ConstTensor cellToInputWeights;
2741 armnn::ConstTensor cellToForgetWeights;
2742 armnn::ConstTensor cellToOutputWeights;
2743
2744 if (qLstmDescriptor.m_PeepholeEnabled)
2745 {
2746 if (!qLstmDescriptor.m_CifgEnabled)
2747 {
2748 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2749 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2750 }
2751
2752 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2753 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2754
2755 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2756 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2757 }
2758
2759 // Optional layer norm params
2760 armnn::ConstTensor inputLayerNormWeights;
2761 armnn::ConstTensor forgetLayerNormWeights;
2762 armnn::ConstTensor cellLayerNormWeights;
2763 armnn::ConstTensor outputLayerNormWeights;
2764
2765 if (qLstmDescriptor.m_LayerNormEnabled)
2766 {
2767 if (!qLstmDescriptor.m_CifgEnabled)
2768 {
2769 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2770 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2771 }
2772
2773 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2774 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2775 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2776
2777 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2778 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2779 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2780 }
2781
2782 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
2783
2784 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
2785 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
2786
2787 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
2788 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
2789
2790 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
2791 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2792
2793 RegisterInputSlots(graph, layerIndex, layer);
2794 RegisterOutputSlots(graph, layerIndex, layer);
2795}
2796
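// QuantizedLstm: unlike LSTM/QLstm there are no optional parameter groups; all twelve
// weight and bias tensors are mandatory, and the layer has two outputs.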
Jan Eilers5b01a892019-07-23 09:47:43 +01002797void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
2798{
2799 CHECK_LAYERS(graph, 0, layerIndex);
2800
2801 auto inputs = GetInputs(graph, layerIndex);
2802 CHECK_VALID_SIZE(inputs.size(), 3);
2803
2804 auto outputs = GetOutputs(graph, layerIndex);
2805 CHECK_VALID_SIZE(outputs.size(), 2);
2806
2807 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2808 auto layerName = GetLayerName(graph, layerIndex);
2809 auto flatBufferInputParams = flatBufferLayer->inputParams();
2810
2811 armnn::QuantizedLstmInputParams lstmInputParams;
2812
2813 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2814 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2815 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2816 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2817 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2818 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2819 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2820 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2821 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2822 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2823 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2824 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2825
2826 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2827 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2828 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2829 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2830 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2831 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2832 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2833 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2834 lstmInputParams.m_InputGateBias = &inputGateBias;
2835 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2836 lstmInputParams.m_CellBias = &cellBias;
2837 lstmInputParams.m_OutputGateBias = &outputGateBias;
2838
2839 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
2840
2841 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2842 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2843
2844 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2845 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2846
2847 RegisterInputSlots(graph, layerIndex, layer);
2848 RegisterOutputSlots(graph, layerIndex, layer);
2849}
2850
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002851void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2852{
2853 CHECK_LAYERS(graph, 0, layerIndex);
2854
2855 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2856 CHECK_VALID_SIZE(inputs.size(), 1);
2857
2858 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2859 CHECK_VALID_SIZE(outputs.size(), 1);
2860
2861 const std::string layerName = GetLayerName(graph, layerIndex);
2862 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2863
2864 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2865 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2866
2867 RegisterInputSlots(graph, layerIndex, layer);
2868 RegisterOutputSlots(graph, layerIndex, layer);
2869}
2870
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002871void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2872{
2873 CHECK_LAYERS(graph, 0, layerIndex);
2874
2875 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2876 CHECK_VALID_SIZE(inputs.size(), 2);
2877
2878 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2879 CHECK_VALID_SIZE(outputs.size(), 1);
2880
2881 const std::string layerName = GetLayerName(graph, layerIndex);
2882 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2883
2884 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2885 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2886
2887 RegisterInputSlots(graph, layerIndex, layer);
2888 RegisterOutputSlots(graph, layerIndex, layer);
2889}
2890
Sadik Armaganeff363d2019-04-05 15:25:46 +01002891void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2892{
2893 CHECK_LAYERS(graph, 0, layerIndex);
2894 auto inputs = GetInputs(graph, layerIndex);
2895 CHECK_LOCATION();
2896 CHECK_VALID_SIZE(inputs.size(), 2);
2897
2898 auto outputs = GetOutputs(graph, layerIndex);
2899 CHECK_VALID_SIZE(outputs.size(), 2);
2900
2901 auto layerName = GetLayerName(graph, layerIndex);
2902 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2903
2904 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2905 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2906
2907 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2908 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2909
2910 RegisterInputSlots(graph, layerIndex, layer);
2911 RegisterOutputSlots(graph, layerIndex, layer);
2912}
2913
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002914void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2915{
2916 CHECK_LAYERS(graph, 0, layerIndex);
2917 auto inputs = GetInputs(graph, layerIndex);
2918 CHECK_LOCATION();
2919 CHECK_VALID_SIZE(inputs.size(), 2);
2920
2921 auto outputs = GetOutputs(graph, layerIndex);
2922 CHECK_VALID_SIZE(outputs.size(), 1);
2923
2924 auto layerName = GetLayerName(graph, layerIndex);
2925 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2926
2927 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2928 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2929
2930 RegisterInputSlots(graph, layerIndex, layer);
2931 RegisterOutputSlots(graph, layerIndex, layer);
2932}
2933
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002934void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
2935{
2936 CHECK_LAYERS(graph, 0, layerIndex);
2937
2938 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2939
2940 auto inputs = GetInputs(graph, layerIndex);
2941 CHECK_VALID_SIZE(inputs.size(), 1);
2942
2943 auto outputs = GetOutputs(graph, layerIndex);
2944 CHECK_VALID_SIZE(outputs.size(), 1);
2945 auto outputInfo = ToTensorInfo(outputs[0]);
2946
2947 auto layerName = GetLayerName(graph, layerIndex);
2948 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2949
2950 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2951 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2952
2953 RegisterInputSlots(graph, layerIndex, layer);
2954 RegisterOutputSlots(graph, layerIndex, layer);
2955}
2956
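// TransposeConvolution2d: weights are always serialized with the layer; biases are read only
// when m_BiasEnabled is set and are passed to AddTransposeConvolution2dLayer as an
// armnn::Optional so the layer can be created with or without them.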
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002957void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2958{
2959 CHECK_LAYERS(graph, 0, layerIndex);
2960
2961 auto inputs = GetInputs(graph, layerIndex);
2962 CHECK_VALID_SIZE(inputs.size(), 1);
2963
2964 auto outputs = GetOutputs(graph, layerIndex);
2965 CHECK_VALID_SIZE(outputs.size(), 1);
2966
2967 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2968 auto layerName = GetLayerName(graph, layerIndex);
2969 auto serializerDescriptor = serializerLayer->descriptor();
2970
2971 armnn::TransposeConvolution2dDescriptor descriptor;
2972 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2973 descriptor.m_PadRight = serializerDescriptor->padRight();
2974 descriptor.m_PadTop = serializerDescriptor->padTop();
2975 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2976 descriptor.m_StrideX = serializerDescriptor->strideX();
2977 descriptor.m_StrideY = serializerDescriptor->strideY();
2978 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
2979 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2980
2981 // weights & biases
2982 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2983 armnn::Optional<armnn::ConstTensor> optionalBiases;
2984 if (descriptor.m_BiasEnabled)
2985 {
2986 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2987 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2988 }
2989
2990 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2991 weights,
2992 optionalBiases,
2993 layerName.c_str());
2994
2995 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2996 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2997
2998 RegisterInputSlots(graph, layerIndex, layer);
2999 RegisterOutputSlots(graph, layerIndex, layer);
3000}
3001
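// Stack: the descriptor records the expected shape of every input; each actual input shape is
// checked against it and a ParseException is thrown on mismatch before the layer is created.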
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003002void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
3003{
3004 CHECK_LAYERS(graph, 0, layerIndex);
3005 auto inputs = GetInputs(graph, layerIndex);
3006
3007 auto outputs = GetOutputs(graph, layerIndex);
3008 CHECK_VALID_SIZE(outputs.size(), 1);
3009
3010 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3011 unsigned int axis = flatBufferDescriptor->axis();
3012 unsigned int numInputs = flatBufferDescriptor->numInputs();
3013 CHECK_VALID_SIZE(inputs.size(), numInputs);
3014
3015 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3016 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3017 flatBufferInputShape->begin() + flatBufferInputShape->size());
3018
3019 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3020 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3021
3022 for (unsigned int i=0; i<inputs.size(); ++i)
3023 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003024 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003025 if (descriptor.m_InputShape != inputShape)
3026 {
3027 std::stringstream ss;
3028 ss << "Shape of input "
3029 << i
3030 << " "
3031 << inputShape
3032 << " does not equal defined input shape "
3033 << descriptor.m_InputShape
3034 << ": "
3035 << CHECK_LOCATION().AsString();
3036 throw ParseException(ss.str());
3037 }
3038 }
3039
3040 auto layerName = GetLayerName(graph, layerIndex);
3041 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3042
3043 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3044 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3045
3046 RegisterInputSlots(graph, layerIndex, layer);
3047 RegisterOutputSlots(graph, layerIndex, layer);
3048}
3049
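// StandIn: placeholder for an operation ArmNN does not implement directly. The descriptor only
// carries the input and output counts, so just the output tensor infos are restored here.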
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003050void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
3051{
3052 CHECK_LAYERS(graph, 0, layerIndex);
3053
3054 auto inputs = GetInputs(graph, layerIndex);
3055 auto outputs = GetOutputs(graph, layerIndex);
3056
3057 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3058 auto fbDescriptor = fbLayer->descriptor();
3059
3060 armnn::StandInDescriptor descriptor;
3061 descriptor.m_NumInputs = fbDescriptor->numInputs();
3062 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3063
3064 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3065 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3066
3067 const std::string layerName = GetLayerName(graph, layerIndex);
3068 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3069
3070 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3071 {
3072 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3073 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3074 }
3075
3076 RegisterInputSlots(graph, layerIndex, layer);
3077 RegisterOutputSlots(graph, layerIndex, layer);
3078}
3079
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003080} // namespace armnnDeserializer