blob: 36beebc1cda69692931f6968b29cacb88bd849c7 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000018
Kevin May43a799c2019-02-08 16:31:42 +000019#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000020#include <VerificationHelpers.hpp>
21
22#include <boost/filesystem.hpp>
23#include <boost/format.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010024#include <boost/numeric/conversion/cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000038namespace
39{
Kevin May43a799c2019-02-08 16:31:42 +000040
// Sentinel layer index (max uint32) that is exempt from the layer-index range
// check in CheckLayers; layers carrying this id are not expected to appear in
// the serialized layer vector.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
42
Derek Lamberti0028d1b2019-02-20 13:57:42 +000043 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000044 unsigned int layersIndex,
45 const CheckLocation& location)
46{
47 if (graph->layers() == nullptr)
48 {
49 throw ParseException(
50 boost::str(
51 boost::format("%1% was called with invalid (null) graph. "
52 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
53 "layers:%2% at %3%") %
54 location.m_Function %
55 layersIndex %
56 location.FileLine()));
57 }
58 else if (layersIndex >= graph->layers()->size())
59 {
60 throw ParseException(
61 boost::str(
62 boost::format("%1% was called with an invalid layers index. "
63 "layers:%2% at %3%") %
64 location.m_Function %
65 layersIndex %
66 location.FileLine()));
67 }
68}
69
Derek Lamberti0028d1b2019-02-20 13:57:42 +000070void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000071 unsigned int layersIndex,
72 unsigned int layerIndex,
73 const CheckLocation& location)
74{
75 if (graph->layers() == nullptr)
76 {
77 throw ParseException(
78 boost::str(
79 boost::format("%1% was called with invalid (null) graph. "
80 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000081 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000082 location.m_Function %
83 layersIndex %
84 location.FileLine()));
85 }
86 else if (layersIndex >= graph->layers()->size())
87 {
88 throw ParseException(
89 boost::str(
90 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000091 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000092 location.m_Function %
93 layersIndex %
94 location.FileLine()));
95 }
96 else if (layerIndex >= graph->layers()[layersIndex].size()
97 && layerIndex != VIRTUAL_LAYER_ID)
98 {
99 throw ParseException(
100 boost::str(
101 boost::format("%1% was called with an invalid layer index. "
102 "layers:%2% layer:%3% at %4%") %
103 location.m_Function %
104 layersIndex %
105 layerIndex %
106 location.FileLine()));
107 }
108}
109
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000110void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000111 const CheckLocation& location)
112{
113 if (rawPtr == nullptr)
114 {
115 throw ParseException(
116 boost::str(
117 boost::format("%1% was called with a null tensor pointer. "
118 "at %2%") %
119 location.m_Function %
120 location.FileLine()));
121
122 }
123}
124
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000125void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000126 const CheckLocation& location)
127{
128 if (rawPtr == nullptr)
129 {
130 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
131 location.m_Function %
132 location.FileLine()));
133 }
134}
135
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000136void CheckConstTensorSize(const unsigned int constTensorSize,
137 const unsigned int tensorSize,
138 const CheckLocation& location)
139{
140 if (constTensorSize != tensorSize)
141 {
142 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
143 location.m_Function %
144 location.FileLine()));
145 }
146}
147
Kevin May43a799c2019-02-08 16:31:42 +0000148#define CHECK_TENSOR_PTR(TENSOR_PTR) \
149 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
150
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000151#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
152 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
153
Mike Kellya0766c32019-02-19 17:22:07 +0000154#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
155 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
156
Kevin May43a799c2019-02-08 16:31:42 +0000157#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
158 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
159
160#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
161 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
162}
163
Saoirse Stewart263829c2019-02-19 15:54:14 +0000164bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
165{
166 const unsigned int actualSize = actual.GetNumDimensions();
167 if (actualSize != expected.size())
168 {
169 return false;
170 }
171
172 for (unsigned int i = 0u; i < actualSize; i++)
173 {
174 if (actual[i] != static_cast<unsigned int>(expected[i]))
175 {
176 return false;
177 }
178 }
179
180 return true;
181}
182
// Builds the layer-type -> member-function dispatch table used while walking
// the serialized graph. Every slot defaults to ParseUnsupportedLayer; only
// the layer types explicitly registered below are deserializable.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // MergerLayer is handled by the same parser as ConcatLayer.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &Deserializer::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &Deserializer::ParseTranspose;
}
245
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000246Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000247{
248 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
249
250 switch(layerType)
251 {
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100252 case Layer::Layer_AbsLayer:
253 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
Mike Kellyaf484012019-02-20 16:53:11 +0000254 case Layer::Layer_ActivationLayer:
255 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000256 case Layer::Layer_AdditionLayer:
257 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100258 case Layer::Layer_ArgMinMaxLayer:
259 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000260 case Layer::Layer_BatchToSpaceNdLayer:
261 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000262 case Layer::Layer_BatchNormalizationLayer:
263 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100264 case Layer::Layer_ComparisonLayer:
265 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
Jim Flynne242f2d2019-05-22 14:24:13 +0100266 case Layer::Layer_ConcatLayer:
267 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000268 case Layer::Layer_ConstantLayer:
269 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000270 case Layer::Layer_Convolution2dLayer:
271 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +0100272 case Layer::Layer_DepthToSpaceLayer:
273 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000274 case Layer::Layer_DepthwiseConvolution2dLayer:
275 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000276 case Layer::Layer_DequantizeLayer:
277 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000278 case Layer::Layer_DetectionPostProcessLayer:
279 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000280 case Layer::Layer_DivisionLayer:
281 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000282 case Layer::Layer_EqualLayer:
283 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000284 case Layer::Layer_FullyConnectedLayer:
285 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000286 case Layer::Layer_FloorLayer:
287 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000288 case Layer::Layer_GatherLayer:
289 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000290 case Layer::Layer_GreaterLayer:
291 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000292 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000293 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Aron Virginas-Tar781ced92019-10-03 11:15:39 +0100294 case Layer::Layer_InstanceNormalizationLayer:
295 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000296 case Layer::Layer_L2NormalizationLayer:
297 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
Sadik Armagan26257852019-10-14 13:00:47 +0100298 case Layer::Layer_LogSoftmaxLayer:
299 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
Jim Flynn11af3752019-03-19 17:22:29 +0000300 case Layer::Layer_LstmLayer:
301 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000302 case Layer::Layer_MeanLayer:
303 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000304 case Layer::Layer_MinimumLayer:
305 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000306 case Layer::Layer_MaximumLayer:
307 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100308 case Layer::Layer_MergeLayer:
309 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000310 case Layer::Layer_MergerLayer:
311 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000312 case Layer::Layer_MultiplicationLayer:
313 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000314 case Layer::Layer_NormalizationLayer:
315 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000316 case Layer::Layer_OutputLayer:
317 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000318 case Layer::Layer_PadLayer:
319 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000320 case Layer::Layer_PermuteLayer:
321 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000322 case Layer::Layer_Pooling2dLayer:
323 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100324 case Layer::Layer_PreluLayer:
325 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
James Conroy8d333182020-05-13 10:27:58 +0100326 case Layer::Layer_QLstmLayer:
327 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
Derek Lamberti87acb272019-03-27 16:51:31 +0000328 case Layer::Layer_QuantizeLayer:
329 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
Jan Eilers5b01a892019-07-23 09:47:43 +0100330 case Layer::Layer_QuantizedLstmLayer:
331 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000332 case Layer::Layer_ReshapeLayer:
333 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000334 case Layer::Layer_ResizeBilinearLayer:
335 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100336 case Layer::Layer_ResizeLayer:
337 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000338 case Layer::Layer_RsqrtLayer:
339 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100340 case Layer::Layer_SliceLayer:
341 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000342 case Layer::Layer_SoftmaxLayer:
343 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000344 case Layer::Layer_SpaceToBatchNdLayer:
345 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100346 case Layer::Layer_SpaceToDepthLayer:
347 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000348 case Layer::Layer_SplitterLayer:
349 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100350 case Layer::Layer_StackLayer:
351 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
Aron Virginas-Tar85121a22019-10-23 10:41:35 +0100352 case Layer::Layer_StandInLayer:
353 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000354 case Layer::Layer_StridedSliceLayer:
355 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000356 case Layer::Layer_SubtractionLayer:
357 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Sadik Armaganeff363d2019-04-05 15:25:46 +0100358 case Layer::Layer_SwitchLayer:
359 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100360 case Layer::Layer_TransposeConvolution2dLayer:
361 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000362 case Layer::Layer_TransposeLayer:
363 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000364 case Layer::Layer_NONE:
365 default:
366 throw ParseException(boost::str(
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100367 boost::format("Layer type %1% not recognized") %
368 layerType));
Kevin May43a799c2019-02-08 16:31:42 +0000369 }
370}
371
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000372std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
373{
374 auto layer = GetBaseLayer(graph, index);
375 assert(layer);
376 return layer->layerName()->str();
377}
378
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000379int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000380{
381 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
382
383 if (layerType == Layer::Layer_InputLayer)
384 {
385 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
386 }
387 else if ( layerType == Layer::Layer_OutputLayer )
388 {
389 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
390 }
391 return 0;
392}
393
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000394armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000395{
396 switch (dataLayout)
397 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000398 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000399 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000400 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000401 default:
402 return armnn::DataLayout::NCHW;
403 }
404}
405
// Maps the serialized activation function enum onto the armnn equivalent.
// Note: unrecognized values silently fall back to Sigmoid (the switch
// default) rather than throwing.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
436
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100437armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
438{
439 switch (function)
440 {
441 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
442 return armnn::ArgMinMaxFunction::Max;
443 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
444 default:
445 return armnn::ArgMinMaxFunction::Min;
446 }
447}
448
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100449armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
450{
451 switch (operation)
452 {
453 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
454 return armnn::ComparisonOperation::Equal;
455 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
456 return armnn::ComparisonOperation::Greater;
457 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
458 return armnn::ComparisonOperation::GreaterOrEqual;
459 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
460 return armnn::ComparisonOperation::Less;
461 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
462 return armnn::ComparisonOperation::LessOrEqual;
463 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
464 default:
465 return armnn::ComparisonOperation::NotEqual;
466 }
467}
468
josh minor4a3c6102020-01-06 16:40:46 -0600469armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
470{
471 switch (operation)
472 {
473 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
474 return armnn::UnaryOperation::Abs;
475 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
476 return armnn::UnaryOperation::Rsqrt;
477 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
478 return armnn::UnaryOperation::Sqrt;
479 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
480 return armnn::UnaryOperation::Exp;
481 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
482 return armnn::UnaryOperation::Neg;
483 default:
484 throw armnn::InvalidArgumentException("Unary operation unknown");
485 }
486}
487
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100488armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
489{
490 switch (method)
491 {
492 case armnnSerializer::ResizeMethod_NearestNeighbor:
493 return armnn::ResizeMethod::NearestNeighbor;
494 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000495 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100496 default:
497 return armnn::ResizeMethod::NearestNeighbor;
498 }
499}
500
// Converts a serialized TensorInfo into an armnn::TensorInfo: data type,
// dimensions, and either per-axis or per-tensor quantization parameters.
// Throws ParseException for data types this deserializer does not support.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        // QuantisedAsymm8 / QuantisedSymm16 are the deprecated names for the
        // QAsymmU8 / QSymmS16 enum values; both spellings map to the same type.
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }


    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // A non-null quantizationScales vector signals per-axis quantization;
    // in that case quantizationDim selects the quantized dimension and the
    // scalar scale/offset fields are ignored.
    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // Per-tensor quantization: a single scale and zero-point offset.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
577
// Converts a serialized constant tensor (flatbuffer) into an armnn::ConstTensor.
// The flatbuffer stores the payload in one of four typed unions (byte/short/int/long);
// the element count is validated against the deserialized TensorInfo and the
// ConstTensor is constructed over the flatbuffer's raw data pointer.
// NOTE(review): no copy appears to be made here, so the flatbuffer buffer
// presumably must outlive the returned ConstTensor - confirm against ConstTensor's contract.
// Throws ParseException for an unrecognised ConstTensorData union tag.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
620
// Collects the serialized TensorInfo of every tensor feeding the given layer's
// input slots, by following each input slot's recorded connection back to its
// source layer.
// NOTE(review): the source layer's output slot 0 is always read here rather
// than the connection's outputSlotIndex - fine for single-output producers,
// but verify for multi-output source layers.
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // sourceLayerIndex is unsigned in the schema; the cast + CHECKED_NON_NEGATIVE
        // guards against values that would not round-trip through int32_t.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
638
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000639Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000640 unsigned int layerIndex)
641{
642 CHECK_LAYERS(graphPtr, 0, layerIndex);
643 auto layer = GetBaseLayer(graphPtr, layerIndex);
644 const auto& numOutputs = layer->outputSlots()->size();
645
646 TensorRawPtrVector result(numOutputs);
647
648 for (unsigned int i=0; i<numOutputs; ++i)
649 {
650 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
651 }
652 return result;
653}
654
Derek Lamberti8ddae332019-02-21 16:29:43 +0000655void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000656{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000657 CHECK_LAYERS(graph, 0, layerIndex);
658 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000659 throw ParseException(
660 boost::str(
661 boost::format("Layer not supported. "
662 "layerIndex: %1% "
663 "layerName: %2% / %3%") %
664 layerIndex %
665 layerName %
666 CHECK_LOCATION().AsString()));
667}
668
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000669void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000670{
671 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000672 m_InputBindings.clear();
673 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000674}
675
// Factory for a raw (caller-owned) deserializer; pair with IDeserializer::Destroy.
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
680
// Factory returning a smart pointer whose deleter is IDeserializer::Destroy,
// so the concrete Deserializer is released correctly across the ABI boundary.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
685
// Deletes a deserializer obtained from CreateRaw()/Create().
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
690
// Deserializes an in-memory flatbuffer blob into an armnn INetwork.
// Resets any previous parser state first.
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}
697
// Stream overload: reads the whole stream into memory, then delegates to the
// buffer-based path. The full buffer is required because flatbuffer
// verification and access need random access to the bytes.
armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}
705
// Validates a raw byte buffer as a SerializedGraph flatbuffer and returns the
// typed root pointer (which aliases the caller's buffer - no copy).
// @throws InvalidArgumentException for a null buffer.
// @throws ParseException if the flatbuffer verifier rejects the buffer.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    // Verify structural integrity before handing out typed accessors; this is
    // what protects the later Get()/size() calls from reading garbage.
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
724
// Builds an armnn INetwork from a verified SerializedGraph in three phases:
//   1. create every non-input/output layer via the per-type parser function,
//   2. create the bound input and output layers,
//   3. wire all recorded output slots to their consumer input slots.
// The two-pass wiring is needed because a connection can reference a layer
// that has not been created yet during phase 1.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot with no registered consumers is legal; skip it.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Transfer ownership of the completed network to the caller.
    return std::move(m_Network);
}
765
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000766BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000767 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000768{
Jan Eilers8eb25602020-03-09 12:13:48 +0000769 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000770 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000771 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000772 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000773 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000774 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000775 }
776 }
777 throw ParseException(
778 boost::str(
779 boost::format("No input binding found for layer:%1% / %2%") %
780 name %
781 CHECK_LOCATION().AsString()));
782}
783
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000784BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000785 const std::string& name) const
786{
Jan Eilers8eb25602020-03-09 12:13:48 +0000787 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000788 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000789 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000790 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000791 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000792 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000793 }
794 }
795 throw ParseException(
796 boost::str(
797 boost::format("No output binding found for layer:%1% / %2%") %
798 name %
799 CHECK_LOCATION().AsString()));
800}
801
Tee Jungaa920c52019-11-05 10:48:25 +0000802unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
803{
804 for (unsigned int i = 0; i < graph->layers()->size(); i++)
805 {
806 auto layer = graph->layers()->Get(i);
807 if (layer->layer_type() == Layer::Layer_InputLayer)
808 {
809 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
810 if (layerBindingId == targetId)
811 {
812 return i;
813 }
814 }
815 }
816 throw ParseException("Input layer with given layerBindingId not found");
817}
818
819unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
820{
821 for (unsigned int i = 0; i < graph->layers()->size(); i++)
822 {
823 auto layer = graph->layers()->Get(i);
824 if (layer->layer_type() == Layer::Layer_OutputLayer)
825 {
826 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
827 if (layerBindingId == targetId)
828 {
829 return i;
830 }
831 }
832 }
833 throw ParseException("Output layer with given layerBindingId not found");
834}
835
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100836unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
837{
838 for (unsigned int i = 0; i < graph->layers()->size(); i++)
839 {
840 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
841 if (layer->index() == targetIndex)
842 {
843 return i;
844 }
845 }
846 throw ParseException("Layer with given index not found");
847}
848
Tee Jungaa920c52019-11-05 10:48:25 +0000849Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
850{
851 Deserializer::FeatureVersions versions;
852
853 if (graph->featureVersions())
854 {
855 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
856 }
857
858 return versions;
859}
860
// Creates an armnn InputLayer for every serialized input id and records its
// (name -> (bindingId, TensorInfo)) binding in m_InputBindings.
// Two id schemes exist: scheme 0 (legacy) treats the stored id as the layer's
// serialized index; newer files treat it as a layerBindingId.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF; // sentinel; always overwritten below
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = boost::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
899
Derek Lamberti8ddae332019-02-21 16:29:43 +0000900void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000901{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000902 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100903 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000904 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100905 m_OutputBindings.reserve(numOutputs);
906
907 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000908 {
Tee Jungaa920c52019-11-05 10:48:25 +0000909 unsigned int outputLayerIndex = 0xFFFFFFFF;
910 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
911 {
912 const unsigned int outputId = boost::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
913 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
914 }
915 else
916 {
917 const int outputId = graph->outputIds()->Get(i);
918 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
919 }
920
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100921 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000922
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100923 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
924 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100925 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000926
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100927 IConnectableLayer* outputLayer =
928 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000929
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100930 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
931
932 unsigned int sourceLayerIndex =
933 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
934 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
935 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
936
Derek Lamberti8ddae332019-02-21 16:29:43 +0000937 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100938 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000939 }
940}
941
// Records every output slot of the freshly created armnn 'layer' in
// m_GraphConnections so later layers can connect to them.
// @throws ParseException if the serialized slot count disagrees with the
//         number of output slots the armnn layer exposes.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
968
// Records every input slot of the freshly created armnn 'layer' against the
// serialized connection it consumes, keyed by the source layer/slot, so the
// final wiring pass can hook it up.
// @throws ParseException if the serialized slot count disagrees with the
//         number of input slots the armnn layer exposes.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}
995
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000996void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
997 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100998 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000999{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001000 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001001 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001002 m_GraphConnections[sourceLayerIndex] = Connections();
1003 }
1004
1005 Connections& connections = m_GraphConnections[sourceLayerIndex];
1006 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1007 {
1008 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001009 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001010 else
1011 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001012 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001013 }
1014}
Kevin May43a799c2019-02-08 16:31:42 +00001015
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001016void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001017 uint32_t outputSlotIndex,
1018 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001019{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001020 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1021 {
1022 m_GraphConnections[sourceLayerIndex] = Connections();
1023 }
1024
1025 Connections& connections = m_GraphConnections[sourceLayerIndex];
1026 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1027 {
1028 throw ParseException("Same output slot index processed twice");
1029 }
1030
1031 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001032}
1033
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001034void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
1035{
1036 CHECK_LAYERS(graph, 0, layerIndex);
1037 auto inputs = GetInputs(graph, layerIndex);
1038 CHECK_LOCATION();
1039 CHECK_VALID_SIZE(inputs.size(), 1);
1040
1041 auto outputs = GetOutputs(graph, layerIndex);
1042 CHECK_VALID_SIZE(outputs.size(), 1);
1043
1044 auto layerName = GetLayerName(graph, layerIndex);
1045
josh minor4a3c6102020-01-06 16:40:46 -06001046 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1047 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001048 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1049 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1050
1051 RegisterInputSlots(graph, layerIndex, layer);
1052 RegisterOutputSlots(graph, layerIndex, layer);
1053}
1054
// Deserializes an Activation layer: rebuilds the ActivationDescriptor
// (function, A, B) from the flatbuffer, creates the armnn layer, sets its
// output tensor info and registers its slots.
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
    // A and B are the activation-specific parameters (e.g. bounds/slope).
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1082
Derek Lamberti8ddae332019-02-21 16:29:43 +00001083void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001084{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001085 CHECK_LAYERS(graph, 0, layerIndex);
1086 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001087 CHECK_LOCATION();
1088 CHECK_VALID_SIZE(inputs.size(), 2);
1089
Derek Lamberti8ddae332019-02-21 16:29:43 +00001090 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001091 CHECK_VALID_SIZE(outputs.size(), 1);
1092
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001093 auto layerName = GetLayerName(graph, layerIndex);
1094 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001095
1096 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1097 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1098
Derek Lamberti8ddae332019-02-21 16:29:43 +00001099 RegisterInputSlots(graph, layerIndex, layer);
1100 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001101}
1102
// Deserializes an ArgMinMax layer: rebuilds the descriptor (min/max function
// and reduction axis) from the flatbuffer, creates the armnn layer, sets its
// output tensor info and registers its slots.
void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ArgMinMaxDescriptor descriptor;
    descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
    descriptor.m_Axis = serializerDescriptor->axis();
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1128
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001129void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1130{
1131 CHECK_LAYERS(graph, 0, layerIndex);
1132
1133 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1134 CHECK_VALID_SIZE(inputs.size(), 1);
1135
1136 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1137 CHECK_VALID_SIZE(outputs.size(), 1);
1138
1139 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1140 auto flatBufferCrops = flatBufferDescriptor->crops();
1141 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1142
1143 if (flatBufferCrops->Length() % 2 != 0)
1144 {
1145 throw ParseException(boost::str(
1146 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1147 }
1148
1149 std::vector<std::pair<unsigned int, unsigned int>> crops;
1150 crops.reserve(flatBufferCrops->Length() / 2);
1151 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1152 {
1153 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1154 }
1155
1156 armnn::BatchToSpaceNdDescriptor descriptor;
1157 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1158 descriptor.m_BlockShape =
1159 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1160 descriptor.m_Crops = crops;
1161
1162 auto layerName = GetLayerName(graph, layerIndex);
1163 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1164
1165 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1166 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1167
1168 RegisterInputSlots(graph, layerIndex, layer);
1169 RegisterOutputSlots(graph, layerIndex, layer);
1170}
1171
// Deserializes a BatchNormalization layer: rebuilds the descriptor (epsilon,
// data layout) and the four constant statistic tensors (mean, variance,
// beta, gamma), creates the armnn layer, sets its output tensor info and
// registers its slots.
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1208
Conor Kennedy76277882019-02-26 08:29:54 +00001209void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1210{
1211 CHECK_LAYERS(graph, 0, layerIndex);
1212 CHECK_LOCATION();
1213
1214 auto outputs = GetOutputs(graph, layerIndex);
1215 CHECK_VALID_SIZE(outputs.size(), 1);
1216
1217 auto layerName = GetLayerName(graph, layerIndex);
1218
1219 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1220 auto serializerInput = serializerLayer->input();
1221
1222 armnn::ConstTensor input = ToConstTensor(serializerInput);
1223
1224 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1225
1226 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1227 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1228
1229 RegisterOutputSlots(graph, layerIndex, layer);
1230}
1231
Derek Lamberti8ddae332019-02-21 16:29:43 +00001232void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001233{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001234 CHECK_LAYERS(graph, 0, layerIndex);
1235 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001236 CHECK_LOCATION();
1237 CHECK_VALID_SIZE(inputs.size(), 1);
1238
Derek Lamberti8ddae332019-02-21 16:29:43 +00001239 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001240 CHECK_VALID_SIZE(outputs.size(), 1);
1241
Derek Lamberti8ddae332019-02-21 16:29:43 +00001242 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001243 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001244 auto serializerDescriptor = serializerLayer->descriptor();
1245
1246 armnn::Convolution2dDescriptor descriptor;
1247 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1248 descriptor.m_PadRight = serializerDescriptor->padRight();
1249 descriptor.m_PadTop = serializerDescriptor->padTop();
1250 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1251 descriptor.m_StrideX = serializerDescriptor->strideX();
1252 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001253 descriptor.m_DilationX = serializerDescriptor->dilationX();
1254 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001255 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1256 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1257
1258 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1259 armnn::ConstTensor biases;
1260
Matteo Martincighfc598e12019-05-14 10:36:13 +01001261 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001262 if (descriptor.m_BiasEnabled)
1263 {
1264 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001265 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001266 }
1267 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1268 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001269 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001270 layerName.c_str());
1271 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1272 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1273
Derek Lamberti8ddae332019-02-21 16:29:43 +00001274 RegisterInputSlots(graph, layerIndex, layer);
1275 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001276}
1277
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001278void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1279{
1280 CHECK_LAYERS(graph, 0, layerIndex);
1281
1282 auto inputs = GetInputs(graph, layerIndex);
1283 CHECK_VALID_SIZE(inputs.size(), 1);
1284
1285 auto outputs = GetOutputs(graph, layerIndex);
1286 CHECK_VALID_SIZE(outputs.size(), 1);
1287
1288 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1289
1290 armnn::DepthToSpaceDescriptor descriptor;
1291 descriptor.m_BlockSize = fbDescriptor->blockSize();
1292 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1293
1294 auto layerName = GetLayerName(graph, layerIndex);
1295 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1296
1297 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1298 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1299
1300 RegisterInputSlots(graph, layerIndex, layer);
1301 RegisterOutputSlots(graph, layerIndex, layer);
1302}
1303
Derek Lamberti8ddae332019-02-21 16:29:43 +00001304void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001305{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001306 CHECK_LAYERS(graph, 0, layerIndex);
1307 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001308 CHECK_LOCATION();
1309 CHECK_VALID_SIZE(inputs.size(), 1);
1310
Derek Lamberti8ddae332019-02-21 16:29:43 +00001311 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001312 CHECK_VALID_SIZE(outputs.size(), 1);
1313
Derek Lamberti8ddae332019-02-21 16:29:43 +00001314 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001315 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001316 auto serializerDescriptor = serializerLayer->descriptor();
1317
1318 armnn::DepthwiseConvolution2dDescriptor descriptor;
1319 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1320 descriptor.m_PadRight = serializerDescriptor->padRight();
1321 descriptor.m_PadTop = serializerDescriptor->padTop();
1322 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1323 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001324 descriptor.m_StrideY = serializerDescriptor->strideY();
1325 descriptor.m_DilationX = serializerDescriptor->dilationX();
1326 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001327 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1328 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1329
1330 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1331 armnn::ConstTensor biases;
1332
Matteo Martincighfc598e12019-05-14 10:36:13 +01001333 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001334 if (descriptor.m_BiasEnabled)
1335 {
1336 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001337 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001338 }
1339 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1340 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001341 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001342 layerName.c_str());
1343
1344 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1345 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1346
Derek Lamberti8ddae332019-02-21 16:29:43 +00001347 RegisterInputSlots(graph, layerIndex, layer);
1348 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001349}
1350
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001351void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1352{
1353 CHECK_LAYERS(graph, 0, layerIndex);
1354 auto inputs = GetInputs(graph, layerIndex);
1355 CHECK_LOCATION();
1356 CHECK_VALID_SIZE(inputs.size(), 2);
1357
1358 auto outputs = GetOutputs(graph, layerIndex);
1359 CHECK_VALID_SIZE(outputs.size(), 4);
1360
1361 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1362 auto layerName = GetLayerName(graph, layerIndex);
1363 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1364
1365 armnn::DetectionPostProcessDescriptor descriptor;
1366 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1367 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1368 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1369 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1370 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1371 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1372 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1373 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1374 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1375 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1376 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1377
1378 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1379
1380 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1381 anchors,
1382 layerName.c_str());
1383
1384 for (unsigned int i = 0; i < 4; i++)
1385 {
1386 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1387 }
1388
1389 RegisterInputSlots(graph, layerIndex, layer);
1390 RegisterOutputSlots(graph, layerIndex, layer);
1391}
1392
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001393void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1394{
1395 CHECK_LAYERS(graph, 0, layerIndex);
1396 auto inputs = GetInputs(graph, layerIndex);
1397 CHECK_LOCATION();
1398 CHECK_VALID_SIZE(inputs.size(), 2);
1399
1400 auto outputs = GetOutputs(graph, layerIndex);
1401 CHECK_VALID_SIZE(outputs.size(), 1);
1402
1403 auto layerName = GetLayerName(graph, layerIndex);
1404 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1405
1406 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1407 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1408
1409 RegisterInputSlots(graph, layerIndex, layer);
1410 RegisterOutputSlots(graph, layerIndex, layer);
1411}
1412
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001413void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1414{
1415 CHECK_LAYERS(graph, 0, layerIndex);
1416 auto inputs = GetInputs(graph, layerIndex);
1417 CHECK_LOCATION();
1418 CHECK_VALID_SIZE(inputs.size(), 2);
1419
1420 auto outputs = GetOutputs(graph, layerIndex);
1421 CHECK_VALID_SIZE(outputs.size(), 1);
1422
1423 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001424 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1425 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001426
1427 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1428 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1429
1430 RegisterInputSlots(graph, layerIndex, layer);
1431 RegisterOutputSlots(graph, layerIndex, layer);
1432}
1433
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001434void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1435{
1436 CHECK_LAYERS(graph, 0, layerIndex);
1437 auto inputs = GetInputs(graph, layerIndex);
1438 CHECK_LOCATION();
1439 CHECK_VALID_SIZE(inputs.size(), 2);
1440
1441 auto outputs = GetOutputs(graph, layerIndex);
1442 CHECK_VALID_SIZE(outputs.size(), 1);
1443
1444 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001445 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1446 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001447
1448 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1449 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1450
1451 RegisterInputSlots(graph, layerIndex, layer);
1452 RegisterOutputSlots(graph, layerIndex, layer);
1453}
1454
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001455void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1456{
1457 CHECK_LAYERS(graph, 0, layerIndex);
1458
1459 auto inputs = GetInputs(graph, layerIndex);
1460 CHECK_VALID_SIZE(inputs.size(), 1);
1461
1462 auto outputs = GetOutputs(graph, layerIndex);
1463 CHECK_VALID_SIZE(outputs.size(), 1);
1464
1465 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1466 auto fbDescriptor = fbLayer->descriptor();
1467
1468 armnn::InstanceNormalizationDescriptor descriptor;
1469 descriptor.m_Gamma = fbDescriptor->gamma();
1470 descriptor.m_Beta = fbDescriptor->beta();
1471 descriptor.m_Eps = fbDescriptor->eps();
1472 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1473
1474 const std::string layerName = GetLayerName(graph, layerIndex);
1475 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1476
1477 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1478 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1479
1480 RegisterInputSlots(graph, layerIndex, layer);
1481 RegisterOutputSlots(graph, layerIndex, layer);
1482}
1483
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001484void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1485{
1486 CHECK_LAYERS(graph, 0, layerIndex);
1487
1488 auto inputs = GetInputs(graph, layerIndex);
1489 CHECK_VALID_SIZE(inputs.size(), 1);
1490
1491 auto outputs = GetOutputs(graph, layerIndex);
1492 CHECK_VALID_SIZE(outputs.size(), 1);
1493 auto outputInfo = ToTensorInfo(outputs[0]);
1494
1495 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1496 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1497
1498 auto layerName = GetLayerName(graph, layerIndex);
1499 armnn::L2NormalizationDescriptor descriptor;
1500 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001501 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001502
1503 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1504 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1505
1506 RegisterInputSlots(graph, layerIndex, layer);
1507 RegisterOutputSlots(graph, layerIndex, layer);
1508}
1509
Sadik Armagan26257852019-10-14 13:00:47 +01001510void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1511{
1512 CHECK_LAYERS(graph, 0, layerIndex);
1513
1514 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1515 CHECK_VALID_SIZE(inputs.size(), 1);
1516
1517 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1518 CHECK_VALID_SIZE(outputs.size(), 1);
1519
1520 armnn::LogSoftmaxDescriptor descriptor;
1521 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1522 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1523 auto layerName = GetLayerName(graph, layerIndex);
1524
1525 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1526
1527 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1528 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1529
1530 RegisterInputSlots(graph, layerIndex, layer);
1531 RegisterOutputSlots(graph, layerIndex, layer);
1532}
1533
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001534void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1535{
1536 CHECK_LAYERS(graph, 0, layerIndex);
1537 auto inputs = GetInputs(graph, layerIndex);
1538 CHECK_LOCATION();
1539 CHECK_VALID_SIZE(inputs.size(), 2);
1540
1541 auto outputs = GetOutputs(graph, layerIndex);
1542 CHECK_VALID_SIZE(outputs.size(), 1);
1543
1544 auto layerName = GetLayerName(graph, layerIndex);
1545 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1546
1547 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1548 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1549
1550 RegisterInputSlots(graph, layerIndex, layer);
1551 RegisterOutputSlots(graph, layerIndex, layer);
1552}
1553
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001554void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1555{
1556 CHECK_LAYERS(graph, 0, layerIndex);
1557 auto inputs = GetInputs(graph, layerIndex);
1558 CHECK_LOCATION();
1559 CHECK_VALID_SIZE(inputs.size(), 2);
1560
1561 auto outputs = GetOutputs(graph, layerIndex);
1562 CHECK_VALID_SIZE(outputs.size(), 1);
1563
1564 auto layerName = GetLayerName(graph, layerIndex);
1565 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1566
1567 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1568 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1569
1570 RegisterInputSlots(graph, layerIndex, layer);
1571 RegisterOutputSlots(graph, layerIndex, layer);
1572}
1573
Jim Flynne242f2d2019-05-22 14:24:13 +01001574const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1575 unsigned int layerIndex)
1576{
1577 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1578
1579 switch (layerType)
1580 {
1581 case Layer::Layer_ConcatLayer:
1582 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1583 case Layer::Layer_MergerLayer:
1584 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1585 default:
1586 throw armnn::Exception("unknown layer type, should be concat or merger");
1587 }
1588}
1589
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001590void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1591{
1592 CHECK_LAYERS(graph, 0, layerIndex);
1593 CHECK_LOCATION();
1594
1595 auto inputs = GetInputs(graph, layerIndex);
1596 CHECK_VALID_SIZE(inputs.size(), 2);
1597
1598 auto outputs = GetOutputs(graph, layerIndex);
1599 CHECK_VALID_SIZE(outputs.size(), 1);
1600
1601 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1602 auto fbDescriptor = fbLayer->descriptor();
1603
1604 armnn::ComparisonDescriptor descriptor;
1605 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1606
1607 const std::string& layerName = GetLayerName(graph, layerIndex);
1608 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1609
1610 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1611 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1612
1613 RegisterInputSlots(graph, layerIndex, layer);
1614 RegisterOutputSlots(graph, layerIndex, layer);
1615}
1616
josh minor4a3c6102020-01-06 16:40:46 -06001617void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1618{
1619 CHECK_LAYERS(graph, 0, layerIndex);
1620 CHECK_LOCATION();
1621
1622 auto inputs = GetInputs(graph, layerIndex);
1623 CHECK_VALID_SIZE(inputs.size(), 1);
1624
1625 auto outputs = GetOutputs(graph, layerIndex);
1626 CHECK_VALID_SIZE(outputs.size(), 1);
1627
1628 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1629 auto fbDescriptor = fbLayer->descriptor();
1630
1631 armnn::ElementwiseUnaryDescriptor descriptor;
1632 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1633
1634 const std::string& layerName = GetLayerName(graph, layerIndex);
1635 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1636
1637 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1638 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1639
1640 RegisterInputSlots(graph, layerIndex, layer);
1641 RegisterOutputSlots(graph, layerIndex, layer);
1642}
1643
Jim Flynn906f9462019-05-10 13:55:21 +01001644void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001645{
1646 CHECK_LAYERS(graph, 0, layerIndex);
1647 CHECK_LOCATION();
1648
1649 auto outputs = GetOutputs(graph, layerIndex);
1650 CHECK_VALID_SIZE(outputs.size(), 1);
1651
Jim Flynnac25a1b2019-02-28 10:40:49 +00001652 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001653 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1654 unsigned int numViews = originsDescriptor->numViews();
1655 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001656
1657 // can now check the number of inputs == number of views
1658 auto inputs = GetInputs(graph, layerIndex);
1659 CHECK_VALID_SIZE(inputs.size(), numViews);
1660
1661 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001662 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001663 for (unsigned int v = 0; v < numViews; ++v)
1664 {
1665 auto originPtr = originsPtr->Get(v);
1666 for (unsigned int d = 0; d < numDimensions; ++d)
1667 {
1668 uint32_t value = originPtr->data()->Get(d);
1669 descriptor.SetViewOriginCoord(v, d, value);
1670 }
1671 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001672 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001673
Jim Flynn906f9462019-05-10 13:55:21 +01001674 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001675 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1676 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1677
1678 RegisterInputSlots(graph, layerIndex, layer);
1679 RegisterOutputSlots(graph, layerIndex, layer);
1680}
1681
Derek Lamberti8ddae332019-02-21 16:29:43 +00001682void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001683{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001684 CHECK_LAYERS(graph, 0, layerIndex);
1685 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001686 CHECK_LOCATION();
1687 CHECK_VALID_SIZE(inputs.size(), 2);
1688
Derek Lamberti8ddae332019-02-21 16:29:43 +00001689 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001690 CHECK_VALID_SIZE(outputs.size(), 1);
1691
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001692 auto layerName = GetLayerName(graph, layerIndex);
1693 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001694
1695 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1696 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1697
Derek Lamberti8ddae332019-02-21 16:29:43 +00001698 RegisterInputSlots(graph, layerIndex, layer);
1699 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001700}
1701
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001702void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1703{
1704 CHECK_LAYERS(graph, 0, layerIndex);
1705 CHECK_LOCATION();
1706
1707 auto inputs = GetInputs(graph, layerIndex);
1708 CHECK_VALID_SIZE(inputs.size(), 1);
1709
1710 auto outputs = GetOutputs(graph, layerIndex);
1711 CHECK_VALID_SIZE(outputs.size(), 1);
1712
1713 auto layerName = GetLayerName(graph, layerIndex);
1714
1715 armnn::IConnectableLayer* layer;
1716
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001717 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001718
1719 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1720 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1721
1722 RegisterInputSlots(graph, layerIndex, layer);
1723 RegisterOutputSlots(graph, layerIndex, layer);
1724}
1725
Derek Lamberti8ddae332019-02-21 16:29:43 +00001726void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001727{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001728 CHECK_LAYERS(graph, 0, layerIndex);
1729 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001730 CHECK_LOCATION();
1731 CHECK_VALID_SIZE(inputs.size(), 1);
1732
Derek Lamberti8ddae332019-02-21 16:29:43 +00001733 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001734 CHECK_VALID_SIZE(outputs.size(), 1);
1735
Derek Lamberti8ddae332019-02-21 16:29:43 +00001736 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001737 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001738 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1739
1740 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1741 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1742 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1743
1744 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1745
1746 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001747 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001748 if (flatBufferDescriptor->biasEnabled())
1749 {
1750 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001751 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001752 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001753 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1754 weightsTensor,
1755 optionalBiases,
1756 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001757
1758 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1759 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1760
Derek Lamberti8ddae332019-02-21 16:29:43 +00001761 RegisterInputSlots(graph, layerIndex, layer);
1762 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001763}
1764
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001765void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1766{
1767 CHECK_LAYERS(graph, 0, layerIndex);
1768
1769 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1770 CHECK_VALID_SIZE(inputs.size(), 1);
1771
1772 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1773 CHECK_VALID_SIZE(outputs.size(), 1);
1774
1775 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1776 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001777 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001778
1779 if (flatBufferPadList->Length() % 2 != 0)
1780 {
1781 throw ParseException(boost::str(
1782 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1783 }
1784
1785 std::vector<std::pair<unsigned int, unsigned int>> padList;
1786 padList.reserve(flatBufferPadList->Length() / 2);
1787 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1788 {
1789 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1790 }
1791
David Monahan34757812019-06-19 11:47:21 +01001792 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001793
1794 auto layerName = GetLayerName(graph, layerIndex);
1795 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1796
1797 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1798 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1799
1800 RegisterInputSlots(graph, layerIndex, layer);
1801 RegisterOutputSlots(graph, layerIndex, layer);
1802}
1803
Derek Lamberti8ddae332019-02-21 16:29:43 +00001804void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001805{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001806 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001807
1808 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001809 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001810
Derek Lamberti8ddae332019-02-21 16:29:43 +00001811 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001812 CHECK_VALID_SIZE(inputs.size(), 1);
1813
Derek Lamberti8ddae332019-02-21 16:29:43 +00001814 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001815 CHECK_VALID_SIZE(outputs.size(), 1);
1816 auto outputInfo = ToTensorInfo(outputs[0]);
1817
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001818 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001819 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1820
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001821 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001822 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1823
Derek Lamberti8ddae332019-02-21 16:29:43 +00001824 RegisterInputSlots(graph, layerIndex, layer);
1825 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001826}
1827
// Translates a serialized (flatbuffer) pooling descriptor into an
// armnn::Pooling2dDescriptor. 'layerIndex' is accepted for signature
// uniformity with the other descriptor helpers but is unused here.
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Map the serialized pooling algorithm onto the armnn enum.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            // NOTE(review): if ARMNN_ASSERT_MSG compiles out in release builds,
            // an unrecognised value silently leaves m_PoolType at its default —
            // confirm whether a hard failure is wanted here instead.
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Map the serialized output-shape rounding mode onto the armnn enum.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            // NOTE(review): same assert-only fallback as above.
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Map the serialized padding method onto the armnn enum.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            // NOTE(review): same assert-only fallback as above.
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Map the serialized data layout onto the armnn enum.
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            // NOTE(review): same assert-only fallback as above.
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // The remaining scalar fields are copied over verbatim.
    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
1917
Derek Lamberti8ddae332019-02-21 16:29:43 +00001918void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001919{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001920 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001921
Derek Lamberti8ddae332019-02-21 16:29:43 +00001922 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001923 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001924 CHECK_VALID_SIZE(inputs.size(), 1);
1925
Derek Lamberti8ddae332019-02-21 16:29:43 +00001926 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001927 CHECK_VALID_SIZE(outputs.size(), 1);
1928 auto outputInfo = ToTensorInfo(outputs[0]);
1929
1930 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001931 auto layerName = GetLayerName(graph, layerIndex);
1932 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001933 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1934
Derek Lamberti8ddae332019-02-21 16:29:43 +00001935 RegisterInputSlots(graph, layerIndex, layer);
1936 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001937}
1938
Derek Lamberti87acb272019-03-27 16:51:31 +00001939void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1940{
1941 CHECK_LAYERS(graph, 0, layerIndex);
1942
1943 auto inputs = GetInputs(graph, layerIndex);
1944 CHECK_VALID_SIZE(inputs.size(), 1);
1945
1946 auto outputs = GetOutputs(graph, layerIndex);
1947 CHECK_VALID_SIZE(outputs.size(), 1);
1948 auto outputInfo = ToTensorInfo(outputs[0]);
1949
1950 auto layerName = GetLayerName(graph, layerIndex);
1951 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1952 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1953
1954 RegisterInputSlots(graph, layerIndex, layer);
1955 RegisterOutputSlots(graph, layerIndex, layer);
1956}
1957
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001958armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001959 const std::vector<uint32_t>& targetDimsIn)
1960{
1961 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1962 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1963
1964 if (stretchDim != targetDimsIn.end())
1965 {
1966 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1967 {
1968 throw ParseException(boost::str(
1969 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1970 }
1971
1972 auto targetNumElements =
1973 boost::numeric_cast<unsigned int>(
1974 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1975
1976 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1977 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1978 }
1979
1980 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1981
1982 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1983 reshapeInfo.SetShape(outputShape);
1984
1985 return reshapeInfo;
1986}
1987
// Deserializes a Reshape layer: resolves the target shape (including a possible
// -1 stretch dimension) via OutputShapeOfReshape and adds the layer to the network.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    // NOTE(review): inputs.size() is never validated here, unlike the other
    // Parse* functions — confirm whether a CHECK_VALID_SIZE was intended.

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 stretch dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the consistency check only fires when there is more than one
    // input; presumably mirroring the TfLite-style "shape as second input" case —
    // verify this guard is intentional for the deserializer.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The computed (resolved) shape is used for the output slot, not the
    // serialized output info.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2030
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002031void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2032{
2033 CHECK_LAYERS(graph, 0, layerIndex);
2034
2035 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2036 CHECK_VALID_SIZE(inputs.size(), 1);
2037
2038 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2039 CHECK_VALID_SIZE(outputs.size(), 1);
2040
2041 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2042
2043 armnn::ResizeDescriptor descriptor;
2044 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2045 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2046 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2047 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2048
2049 auto layerName = GetLayerName(graph, layerIndex);
2050 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2051
2052 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2053 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2054
2055 RegisterInputSlots(graph, layerIndex, layer);
2056 RegisterOutputSlots(graph, layerIndex, layer);
2057}
2058
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002059void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2060{
2061 CHECK_LAYERS(graph, 0, layerIndex);
2062
2063 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2064 CHECK_VALID_SIZE(inputs.size(), 1);
2065
2066 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2067 CHECK_VALID_SIZE(outputs.size(), 1);
2068
2069 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2070
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002071 armnn::ResizeDescriptor descriptor;
2072 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002073 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002074 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2075 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002076
2077 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002078 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002079
2080 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2081 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2082
2083 RegisterInputSlots(graph, layerIndex, layer);
2084 RegisterOutputSlots(graph, layerIndex, layer);
2085}
2086
Derek Lamberti8ddae332019-02-21 16:29:43 +00002087void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002088{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002089 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002090
Derek Lamberti8ddae332019-02-21 16:29:43 +00002091 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002092 CHECK_VALID_SIZE(inputs.size(), 1);
2093
Derek Lamberti8ddae332019-02-21 16:29:43 +00002094 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002095 CHECK_VALID_SIZE(outputs.size(), 1);
2096
2097 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002098 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002099 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002100
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002101 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2102
2103 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2104 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2105
Derek Lamberti8ddae332019-02-21 16:29:43 +00002106 RegisterInputSlots(graph, layerIndex, layer);
2107 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002108}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002109
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002110void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
2111{
2112 CHECK_LAYERS(graph, 0, layerIndex);
2113
2114 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2115 CHECK_VALID_SIZE(inputs.size(), 1);
2116
2117 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2118 CHECK_VALID_SIZE(outputs.size(), 1);
2119
2120 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2121 auto flatBufferPadList = flatBufferDescriptor->padList();
2122 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2123
2124 if (flatBufferPadList->Length() % 2 != 0)
2125 {
2126 throw ParseException(boost::str(
2127 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
2128 }
2129
2130 std::vector<std::pair<unsigned int, unsigned int>> padList;
2131 padList.reserve(flatBufferPadList->Length() / 2);
2132 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2133 {
2134 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2135 }
2136
2137 armnn::SpaceToBatchNdDescriptor descriptor;
2138 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2139 descriptor.m_BlockShape =
2140 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2141 descriptor.m_PadList = padList;
2142
2143 auto layerName = GetLayerName(graph, layerIndex);
2144 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2145
2146 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2147 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2148
2149 RegisterInputSlots(graph, layerIndex, layer);
2150 RegisterOutputSlots(graph, layerIndex, layer);
2151}
2152
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002153void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2154{
2155 CHECK_LAYERS(graph, 0, layerIndex);
2156
2157 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2158 CHECK_VALID_SIZE(inputs.size(), 1);
2159
2160 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2161 CHECK_VALID_SIZE(outputs.size(), 1);
2162
2163 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2164
2165 armnn::SpaceToDepthDescriptor descriptor;
2166 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2167 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2168
2169 auto layerName = GetLayerName(graph, layerIndex);
2170 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2171
2172 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2173 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2174
2175 RegisterInputSlots(graph, layerIndex, layer);
2176 RegisterOutputSlots(graph, layerIndex, layer);
2177}
2178
Nina Drozd57728782019-02-27 10:53:27 +00002179armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
2180 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
2181 unsigned int layerIndex)
2182{
Jan Eilers8eb25602020-03-09 12:13:48 +00002183 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002184 armnn::NormalizationDescriptor desc;
2185
2186 switch (normalizationDescriptor->normChannelType())
2187 {
2188 case NormalizationAlgorithmChannel_Across:
2189 {
2190 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2191 break;
2192 }
2193 case NormalizationAlgorithmChannel_Within:
2194 {
2195 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2196 break;
2197 }
2198 default:
2199 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002200 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002201 }
2202 }
2203
2204 switch (normalizationDescriptor->normMethodType())
2205 {
2206 case NormalizationAlgorithmMethod_LocalBrightness:
2207 {
2208 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2209 break;
2210 }
2211 case NormalizationAlgorithmMethod_LocalContrast:
2212 {
2213 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2214 break;
2215 }
2216 default:
2217 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002218 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002219 }
2220 }
2221
2222 switch (normalizationDescriptor->dataLayout())
2223 {
2224 case DataLayout_NCHW:
2225 {
2226 desc.m_DataLayout = armnn::DataLayout::NCHW;
2227 break;
2228 }
2229 case DataLayout_NHWC:
2230 {
2231 desc.m_DataLayout = armnn::DataLayout::NHWC;
2232 break;
2233 }
2234 default:
2235 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002236 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002237 }
2238 }
2239
2240 desc.m_Alpha = normalizationDescriptor->alpha();
2241 desc.m_Beta = normalizationDescriptor->beta();
2242 desc.m_K = normalizationDescriptor->k();
2243 desc.m_NormSize = normalizationDescriptor->normSize();
2244
2245 return desc;
2246}
2247
2248void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2249{
2250 CHECK_LAYERS(graph, 0, layerIndex);
2251
2252 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2253
2254 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2255 CHECK_VALID_SIZE(inputs.size(), 1);
2256
2257 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2258 CHECK_VALID_SIZE(outputs.size(), 1);
2259
2260 auto outputInfo = ToTensorInfo(outputs[0]);
2261
2262 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2263 auto layerName = GetLayerName(graph, layerIndex);
2264
2265 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2266 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2267
2268 RegisterInputSlots(graph, layerIndex, layer);
2269 RegisterOutputSlots(graph, layerIndex, layer);
2270}
2271
Sadik Armagan8b42a382019-03-01 14:24:49 +00002272void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2273{
2274 CHECK_LAYERS(graph, 0, layerIndex);
2275 auto inputs = GetInputs(graph, layerIndex);
2276 CHECK_LOCATION();
2277 CHECK_VALID_SIZE(inputs.size(), 1);
2278
2279 auto outputs = GetOutputs(graph, layerIndex);
2280 CHECK_VALID_SIZE(outputs.size(), 1);
2281
2282 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002283
josh minor4a3c6102020-01-06 16:40:46 -06002284 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2285 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002286 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2287 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2288
2289 RegisterInputSlots(graph, layerIndex, layer);
2290 RegisterOutputSlots(graph, layerIndex, layer);
2291}
2292
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002293void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2294{
2295 CHECK_LAYERS(graph, 0, layerIndex);
2296
2297 auto inputs = GetInputs(graph, layerIndex);
2298 CHECK_VALID_SIZE(inputs.size(), 1);
2299
2300 auto outputs = GetOutputs(graph, layerIndex);
2301 CHECK_VALID_SIZE(outputs.size(), 1);
2302
2303 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2304
2305 auto fbBegin = fbDescriptor->begin();
2306 auto fbSize = fbDescriptor->size();
2307
2308 if (fbBegin->Length() != fbSize->Length())
2309 {
2310 throw ParseException(boost::str(
2311 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2312 }
2313
2314 armnn::SliceDescriptor descriptor;
2315 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2316 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2317
2318 auto layerName = GetLayerName(graph, layerIndex);
2319 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2320
2321 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2322 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2323
2324 RegisterInputSlots(graph, layerIndex, layer);
2325 RegisterOutputSlots(graph, layerIndex, layer);
2326}
2327
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002328void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2329{
2330 CHECK_LAYERS(graph, 0, layerIndex);
2331
2332 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2333 CHECK_VALID_SIZE(inputs.size(), 1);
2334
2335 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2336 CHECK_VALID_SIZE(outputs.size(), 1);
2337
2338 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2339
2340 auto flatBufferBegin = flatBufferDescriptor->begin();
2341 auto flatBufferEnd = flatBufferDescriptor->end();
2342 auto flatBufferStride = flatBufferDescriptor->stride();
2343
2344 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2345 flatBufferBegin->Length() == flatBufferStride->Length()))
2346 {
2347 throw ParseException(boost::str(
2348 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2349 }
2350
2351 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2352 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2353 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2354
2355 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2356 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2357 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2358 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2359 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2360 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2361 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2362
2363 auto layerName = GetLayerName(graph, layerIndex);
2364 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2365
2366 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2367 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2368
2369 RegisterInputSlots(graph, layerIndex, layer);
2370 RegisterOutputSlots(graph, layerIndex, layer);
2371}
2372
Conor Kennedyda1f9752019-03-01 14:37:12 +00002373void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2374{
2375 CHECK_LAYERS(graph, 0, layerIndex);
2376 auto inputs = GetInputs(graph, layerIndex);
2377 CHECK_LOCATION();
2378 CHECK_VALID_SIZE(inputs.size(), 2);
2379
2380 auto outputs = GetOutputs(graph, layerIndex);
2381 CHECK_VALID_SIZE(outputs.size(), 1);
2382
2383 auto layerName = GetLayerName(graph, layerIndex);
2384 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2385
2386 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2387 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2388
2389 RegisterInputSlots(graph, layerIndex, layer);
2390 RegisterOutputSlots(graph, layerIndex, layer);
2391}
2392
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002393void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2394{
2395 CHECK_LAYERS(graph, 0, layerIndex);
2396
2397 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2398 CHECK_VALID_SIZE(inputs.size(), 2);
2399
2400 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2401 CHECK_VALID_SIZE(outputs.size(), 1);
2402
2403 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002404 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2405
2406 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002407 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2408
2409 RegisterInputSlots(graph, layerIndex, layer);
2410 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002411}
2412
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002413void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2414{
2415 CHECK_LAYERS(graph, 0, layerIndex);
2416
2417 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2418 CHECK_VALID_SIZE(inputs.size(), 1);
2419
2420 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2421 CHECK_VALID_SIZE(outputs.size(), 1);
2422
2423 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2424 auto flatBufferAxis = flatBufferDescriptor->axis();
2425 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2426
2427 armnn::MeanDescriptor descriptor;
2428 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2429 descriptor.m_KeepDims = flatBufferKeepDims;
2430
2431 auto layerName = GetLayerName(graph, layerIndex);
2432 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2433
2434 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2435 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2436
2437 RegisterInputSlots(graph, layerIndex, layer);
2438 RegisterOutputSlots(graph, layerIndex, layer);
2439}
2440
Jim Flynn18ce3382019-03-08 11:08:30 +00002441void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2442{
2443 CHECK_LAYERS(graph, 0, layerIndex);
2444
2445 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2446 CHECK_VALID_SIZE(inputs.size(), 1);
2447
2448 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2449
2450 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2451 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2452 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2453 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2454 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2455 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2456
2457 // Check numViews and numDimensions corresponds to the ones already serialized ...
2458 // numViews == flatBufferViewSizes.size();
2459 // foreach: numDimensions == flatBufferViewSizes[x].size();
2460
2461 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2462 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2463 {
2464 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2465 {
2466 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2467 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2468 }
2469 }
2470
2471 auto layerName = GetLayerName(graph, layerIndex);
2472 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2473
2474 // I could have as many outputs as views ...
2475 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2476 {
2477 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2478 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2479 }
2480
2481 RegisterInputSlots(graph, layerIndex, layer);
2482 RegisterOutputSlots(graph, layerIndex, layer);
2483}
2484
Jim Flynn11af3752019-03-19 17:22:29 +00002485armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2486{
2487 armnn::LstmDescriptor desc;
2488
2489 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2490 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2491 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2492 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2493 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2494 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002495 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002496
2497 return desc;
2498}
2499
2500void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
2501{
2502 CHECK_LAYERS(graph, 0, layerIndex);
2503
2504 auto inputs = GetInputs(graph, layerIndex);
2505 CHECK_VALID_SIZE(inputs.size(), 3);
2506
2507 auto outputs = GetOutputs(graph, layerIndex);
2508 CHECK_VALID_SIZE(outputs.size(), 4);
2509
2510 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2511 auto layerName = GetLayerName(graph, layerIndex);
2512 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2513 auto flatBufferInputParams = flatBufferLayer->inputParams();
2514
2515 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2516
2517 armnn::LstmInputParams lstmInputParams;
2518
2519 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2520 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2521 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2522 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2523 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2524 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2525 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2526 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2527 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2528
2529 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2530 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2531 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2532 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2533 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2534 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2535 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2536 lstmInputParams.m_CellBias = &cellBias;
2537 lstmInputParams.m_OutputGateBias = &outputGateBias;
2538
2539 armnn::ConstTensor inputToInputWeights;
2540 armnn::ConstTensor recurrentToInputWeights;
2541 armnn::ConstTensor cellToInputWeights;
2542 armnn::ConstTensor inputGateBias;
2543 if (!lstmDescriptor.m_CifgEnabled)
2544 {
2545 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2546 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2547 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2548 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2549
2550 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2551 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2552 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2553 lstmInputParams.m_InputGateBias = &inputGateBias;
2554 }
2555
2556 armnn::ConstTensor projectionWeights;
2557 armnn::ConstTensor projectionBias;
2558 if (lstmDescriptor.m_ProjectionEnabled)
2559 {
2560 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2561 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2562
2563 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2564 lstmInputParams.m_ProjectionBias = &projectionBias;
2565 }
2566
2567 armnn::ConstTensor cellToForgetWeights;
2568 armnn::ConstTensor cellToOutputWeights;
2569 if (lstmDescriptor.m_PeepholeEnabled)
2570 {
2571 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2572 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2573
2574 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2575 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2576 }
2577
Jan Eilersf8c62972019-07-17 11:07:49 +01002578 armnn::ConstTensor inputLayerNormWeights;
2579 armnn::ConstTensor forgetLayerNormWeights;
2580 armnn::ConstTensor cellLayerNormWeights;
2581 armnn::ConstTensor outputLayerNormWeights;
2582 if (lstmDescriptor.m_LayerNormEnabled)
2583 {
2584 if (!lstmDescriptor.m_CifgEnabled)
2585 {
2586 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2587 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2588 }
2589 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2590 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2591 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2592
2593 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2594 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2595 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2596 }
2597
Jim Flynn11af3752019-03-19 17:22:29 +00002598 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2599
2600 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2601 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2602
2603 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2604 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2605
2606 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2607 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2608
2609 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2610 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2611
2612 RegisterInputSlots(graph, layerIndex, layer);
2613 RegisterOutputSlots(graph, layerIndex, layer);
2614}
2615
James Conroy8d333182020-05-13 10:27:58 +01002616armnn::QLstmDescriptor Deserializer::GetQLstmDescriptor(Deserializer::QLstmDescriptorPtr qLstmDescriptor)
2617{
2618 armnn::QLstmDescriptor desc;
2619
2620 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2621 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2622 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2623 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2624
2625 desc.m_CellClip = qLstmDescriptor->cellClip();
2626 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2627
2628 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2629 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2630 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2631 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2632
2633 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2634 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2635
2636 return desc;
2637}
2638
// Deserializes a QLstm layer: converts the flatbuffer descriptor, then reads the
// mandatory weight/bias tensors plus whichever optional groups (CIFG, projection,
// peephole, layer normalization) the descriptor flags enable, and adds the layer
// with its three outputs to the network.
// NOTE: armnn::LstmInputParams stores raw pointers, so every ConstTensor local
// below must remain alive until AddQLstmLayer() has been called.
void Deserializer::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params — serialized only when CIFG is disabled
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        // cellToInputWeights additionally requires CIFG to be disabled
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        // inputLayerNormWeights additionally requires CIFG to be disabled
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Three outputs: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2764
// Deserializes a QuantizedLstm layer. Unlike Lstm/QLstm this layer has no
// descriptor and no optional parameter groups: all twelve weight/bias tensors
// are read unconditionally.
// NOTE: armnn::QuantizedLstmInputParams stores raw pointers, so every
// ConstTensor local below must remain alive until AddQuantizedLstmLayer()
// has been called.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // Eight weight tensors and four gate biases, all mandatory.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2818
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002819void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2820{
2821 CHECK_LAYERS(graph, 0, layerIndex);
2822
2823 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2824 CHECK_VALID_SIZE(inputs.size(), 1);
2825
2826 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2827 CHECK_VALID_SIZE(outputs.size(), 1);
2828
2829 const std::string layerName = GetLayerName(graph, layerIndex);
2830 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2831
2832 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2833 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2834
2835 RegisterInputSlots(graph, layerIndex, layer);
2836 RegisterOutputSlots(graph, layerIndex, layer);
2837}
2838
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002839void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2840{
2841 CHECK_LAYERS(graph, 0, layerIndex);
2842
2843 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2844 CHECK_VALID_SIZE(inputs.size(), 2);
2845
2846 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2847 CHECK_VALID_SIZE(outputs.size(), 1);
2848
2849 const std::string layerName = GetLayerName(graph, layerIndex);
2850 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2851
2852 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2853 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2854
2855 RegisterInputSlots(graph, layerIndex, layer);
2856 RegisterOutputSlots(graph, layerIndex, layer);
2857}
2858
Sadik Armaganeff363d2019-04-05 15:25:46 +01002859void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2860{
2861 CHECK_LAYERS(graph, 0, layerIndex);
2862 auto inputs = GetInputs(graph, layerIndex);
2863 CHECK_LOCATION();
2864 CHECK_VALID_SIZE(inputs.size(), 2);
2865
2866 auto outputs = GetOutputs(graph, layerIndex);
2867 CHECK_VALID_SIZE(outputs.size(), 2);
2868
2869 auto layerName = GetLayerName(graph, layerIndex);
2870 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2871
2872 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2873 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2874
2875 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2876 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2877
2878 RegisterInputSlots(graph, layerIndex, layer);
2879 RegisterOutputSlots(graph, layerIndex, layer);
2880}
2881
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002882void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2883{
2884 CHECK_LAYERS(graph, 0, layerIndex);
2885 auto inputs = GetInputs(graph, layerIndex);
2886 CHECK_LOCATION();
2887 CHECK_VALID_SIZE(inputs.size(), 2);
2888
2889 auto outputs = GetOutputs(graph, layerIndex);
2890 CHECK_VALID_SIZE(outputs.size(), 1);
2891
2892 auto layerName = GetLayerName(graph, layerIndex);
2893 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2894
2895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2897
2898 RegisterInputSlots(graph, layerIndex, layer);
2899 RegisterOutputSlots(graph, layerIndex, layer);
2900}
2901
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002902void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
2903{
2904 CHECK_LAYERS(graph, 0, layerIndex);
2905
2906 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2907
2908 auto inputs = GetInputs(graph, layerIndex);
2909 CHECK_VALID_SIZE(inputs.size(), 1);
2910
2911 auto outputs = GetOutputs(graph, layerIndex);
2912 CHECK_VALID_SIZE(outputs.size(), 1);
2913 auto outputInfo = ToTensorInfo(outputs[0]);
2914
2915 auto layerName = GetLayerName(graph, layerIndex);
2916 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2917
2918 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2919 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2920
2921 RegisterInputSlots(graph, layerIndex, layer);
2922 RegisterOutputSlots(graph, layerIndex, layer);
2923}
2924
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002925void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2926{
2927 CHECK_LAYERS(graph, 0, layerIndex);
2928
2929 auto inputs = GetInputs(graph, layerIndex);
2930 CHECK_VALID_SIZE(inputs.size(), 1);
2931
2932 auto outputs = GetOutputs(graph, layerIndex);
2933 CHECK_VALID_SIZE(outputs.size(), 1);
2934
2935 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2936 auto layerName = GetLayerName(graph, layerIndex);
2937 auto serializerDescriptor = serializerLayer->descriptor();
2938
2939 armnn::TransposeConvolution2dDescriptor descriptor;
2940 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2941 descriptor.m_PadRight = serializerDescriptor->padRight();
2942 descriptor.m_PadTop = serializerDescriptor->padTop();
2943 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2944 descriptor.m_StrideX = serializerDescriptor->strideX();
2945 descriptor.m_StrideY = serializerDescriptor->strideY();;
2946 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2947 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2948
2949 // weights & biases
2950 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2951 armnn::Optional<armnn::ConstTensor> optionalBiases;
2952 if (descriptor.m_BiasEnabled)
2953 {
2954 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2955 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2956 }
2957
2958 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2959 weights,
2960 optionalBiases,
2961 layerName.c_str());
2962
2963 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2964 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2965
2966 RegisterInputSlots(graph, layerIndex, layer);
2967 RegisterOutputSlots(graph, layerIndex, layer);
2968}
2969
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002970void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2971{
2972 CHECK_LAYERS(graph, 0, layerIndex);
2973 auto inputs = GetInputs(graph, layerIndex);
2974
2975 auto outputs = GetOutputs(graph, layerIndex);
2976 CHECK_VALID_SIZE(outputs.size(), 1);
2977
2978 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2979 unsigned int axis = flatBufferDescriptor->axis();
2980 unsigned int numInputs = flatBufferDescriptor->numInputs();
2981 CHECK_VALID_SIZE(inputs.size(), numInputs);
2982
2983 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2984 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2985 flatBufferInputShape->begin() + flatBufferInputShape->size());
2986
2987 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2988 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2989
2990 for (unsigned int i=0; i<inputs.size(); ++i)
2991 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01002992 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002993 if (descriptor.m_InputShape != inputShape)
2994 {
2995 std::stringstream ss;
2996 ss << "Shape of input "
2997 << i
2998 << " "
2999 << inputShape
3000 << " does not equal defined input shape "
3001 << descriptor.m_InputShape
3002 << ": "
3003 << CHECK_LOCATION().AsString();
3004 throw ParseException(ss.str());
3005 }
3006 }
3007
3008 auto layerName = GetLayerName(graph, layerIndex);
3009 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3010
3011 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3012 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3013
3014 RegisterInputSlots(graph, layerIndex, layer);
3015 RegisterOutputSlots(graph, layerIndex, layer);
3016}
3017
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003018void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
3019{
3020 CHECK_LAYERS(graph, 0, layerIndex);
3021
3022 auto inputs = GetInputs(graph, layerIndex);
3023 auto outputs = GetOutputs(graph, layerIndex);
3024
3025 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3026 auto fbDescriptor = fbLayer->descriptor();
3027
3028 armnn::StandInDescriptor descriptor;
3029 descriptor.m_NumInputs = fbDescriptor->numInputs();
3030 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3031
3032 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3033 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3034
3035 const std::string layerName = GetLayerName(graph, layerIndex);
3036 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3037
3038 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3039 {
3040 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3041 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3042 }
3043
3044 RegisterInputSlots(graph, layerIndex, layer);
3045 RegisterOutputSlots(graph, layerIndex, layer);
3046}
3047
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003048} // namespace armnnDeserializer