blob: 67836c5843ebffe18f003dbd0bc87ee7e632e22e [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
// Sentinel layer index that CheckLayers treats as always valid.
// NOTE(review): presumably reserved for the virtual input/output binding
// layers that have no slot in the serialized layers vector — confirm
// against the callers that pass it.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
// Convenience wrappers around the validators above. Each one captures the
// *caller's* file/line/function via CHECK_LOCATION() so the ParseException
// points at the call site rather than at the helper.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Builds the layer-type -> parse-member-function dispatch table. Every slot
// is first filled with ParseUnsupportedLayer so that a serialized layer type
// without a handler produces a clear ParseException; the registrations below
// then overwrite the slots for supported types.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // Deprecated MergerLayer is deserialized through the Concat parser.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}
239
// Returns the common LayerBase table of the layer at 'layerIndex' by
// dispatching on the flatbuffers union tag to the matching layer_as_*
// accessor. Input and Output layers take an extra ->base() hop because
// their tables nest a bindable base around the common base.
// Throws ParseException for Layer_NONE or any tag not handled below.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap the bindable base to reach the common base.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap the bindable base to reach the common base.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer type %1% not recognized") %
                layerType));
    }
}
357
// Returns the name stored for the layer at 'index'.
// Note: uses assert rather than an exception, so a null base layer is only
// caught in debug builds.
std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}
364
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000365int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000366{
367 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
368
369 if (layerType == Layer::Layer_InputLayer)
370 {
371 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
372 }
373 else if ( layerType == Layer::Layer_OutputLayer )
374 {
375 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
376 }
377 return 0;
378}
379
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000380armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000381{
382 switch (dataLayout)
383 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000384 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000385 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000386 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000387 default:
388 return armnn::DataLayout::NCHW;
389 }
390}
391
Mike Kellyaf484012019-02-20 16:53:11 +0000392armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
393{
394 switch (function)
395 {
396 case armnnSerializer::ActivationFunction_Sigmoid:
397 return armnn::ActivationFunction::Sigmoid;
398 case armnnSerializer::ActivationFunction_TanH:
399 return armnn::ActivationFunction::TanH;
400 case armnnSerializer::ActivationFunction_Linear:
401 return armnn::ActivationFunction::Linear;
402 case armnnSerializer::ActivationFunction_ReLu:
403 return armnn::ActivationFunction::ReLu;
404 case armnnSerializer::ActivationFunction_BoundedReLu:
405 return armnn::ActivationFunction::BoundedReLu;
406 case armnnSerializer::ActivationFunction_LeakyReLu:
407 return armnn::ActivationFunction::LeakyReLu;
408 case armnnSerializer::ActivationFunction_Abs:
409 return armnn::ActivationFunction::Abs;
410 case armnnSerializer::ActivationFunction_Sqrt:
411 return armnn::ActivationFunction::Sqrt;
412 case armnnSerializer::ActivationFunction_Square:
413 return armnn::ActivationFunction::Square;
414 default:
415 return armnn::ActivationFunction::Sigmoid;
416 }
417}
418
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100419armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
420{
421 switch (function)
422 {
423 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
424 return armnn::ArgMinMaxFunction::Max;
425 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
426 default:
427 return armnn::ArgMinMaxFunction::Min;
428 }
429}
430
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100431armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
432{
433 switch (method)
434 {
435 case armnnSerializer::ResizeMethod_NearestNeighbor:
436 return armnn::ResizeMethod::NearestNeighbor;
437 case armnnSerializer::ResizeMethod_Bilinear:
438 return armnn::ResizeMethod::NearestNeighbor;
439 default:
440 return armnn::ResizeMethod::NearestNeighbor;
441 }
442}
443
// Converts a serialized TensorInfo table into an armnn::TensorInfo:
// translates the data type, copies the dimensions, and carries over the
// quantization scale/offset. Throws ParseException for data types this
// deserializer does not support.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    // Quantization parameters are stored even for non-quantized types;
    // TensorInfo ignores them where they do not apply.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffer dimension vector into contiguous storage for the
    // TensorInfo constructor.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
495
// Converts a serialized ConstTensor into an armnn::ConstTensor, validating
// the pointer and that the stored element count matches the TensorInfo.
// NOTE(review): the returned ConstTensor is constructed from a pointer into
// the flatbuffer, so it appears to alias that memory rather than copy it —
// confirm that 'constTensorPtr' outlives every use of the result.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    // Dispatch on the union tag for the element width the data was stored as.
    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
538
// Collects the TensorInfo feeding each input slot of the layer at
// 'layerIndex' by following every input connection back to its source layer.
// NOTE(review): the source layer's output slot 0 is always read, regardless
// of which output slot the connection nominates — confirm multi-output
// producers are handled elsewhere.
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // Guard against a corrupt file encoding a negative source index.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
556
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000557Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000558 unsigned int layerIndex)
559{
560 CHECK_LAYERS(graphPtr, 0, layerIndex);
561 auto layer = GetBaseLayer(graphPtr, layerIndex);
562 const auto& numOutputs = layer->outputSlots()->size();
563
564 TensorRawPtrVector result(numOutputs);
565
566 for (unsigned int i=0; i<numOutputs; ++i)
567 {
568 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
569 }
570 return result;
571}
572
// Default entry in the parse-function dispatch table: registered for every
// layer type without a dedicated handler. Always throws a ParseException
// naming the offending layer.
void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}
586
// Clears the state accumulated by a previous CreateNetworkFromBinary call so
// the same Deserializer instance can be reused for another graph.
void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}
593
// Factory returning a raw owning pointer; callers must release it with
// IDeserializer::Destroy (prefer Create() for the smart-pointer variant).
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
598
// Factory: creates a Deserializer wrapped in a smart pointer whose deleter is
// IDeserializer::Destroy, so lifetime is managed automatically.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
603
// Destroys a parser previously obtained from CreateRaw()/Create().
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
608
// Deserializes a flatbuffer-encoded graph held in memory into an INetwork.
// Resets all parser state first, so the instance may be reused.
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}
615
// Deserializes a flatbuffer-encoded graph read from a stream into an INetwork.
armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    // Drain the entire stream into a contiguous buffer first: flatbuffer
    // verification/access needs random access over the serialized bytes.
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}
623
// Verifies that the given byte buffer contains a well-formed SerializedGraph
// flatbuffer and returns the graph root pointer (a view into binaryContent;
// the buffer must outlive the returned pointer).
// @throws InvalidArgumentException on a null buffer.
// @throws ParseException if flatbuffer verification fails.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
642
// Builds an INetwork from a verified flatbuffer graph in three phases:
//  1. dispatch each non-Input/Output layer to its registered parser function
//     (which creates the armnn layer and records its slots in
//     m_GraphConnections),
//  2. create the bound input and output layers,
//  3. wire every recorded output slot to the input slots registered against it.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot with no registered consumers is legal (e.g. an
            // unused layer output) - only connect when input slots exist.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand ownership of the built network to the caller; m_Network is left empty.
    return std::move(m_Network);
}
683
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000684BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000685 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000686{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000687 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000688 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000689 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000690 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000691 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000692 }
693 }
694 throw ParseException(
695 boost::str(
696 boost::format("No input binding found for layer:%1% / %2%") %
697 name %
698 CHECK_LOCATION().AsString()));
699}
700
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000701BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000702 const std::string& name) const
703{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000704 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000705 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000706 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000707 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000708 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000709 }
710 }
711 throw ParseException(
712 boost::str(
713 boost::format("No output binding found for layer:%1% / %2%") %
714 name %
715 CHECK_LOCATION().AsString()));
716}
717
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100718unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
719{
720 for (unsigned int i = 0; i < graph->layers()->size(); i++)
721 {
722 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
723 if (layer->index() == targetIndex)
724 {
725 return i;
726 }
727 }
728 throw ParseException("Layer with given index not found");
729}
730
// Creates an armnn InputLayer for every input id recorded in the graph,
// registers its output slots for later wiring, and records a
// (name, BindingPointInfo) entry for GetNetworkInputBindingInfo lookups.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        // inputIds holds the layers' index() property; translate it to a
        // position in the graph's layer vector.
        const unsigned int inputId = graph->inputIds()->Get(i);
        const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // An input layer has exactly one output slot carrying the input tensor.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
759
Derek Lamberti8ddae332019-02-21 16:29:43 +0000760void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000761{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000762 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100763 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000764 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100765 m_OutputBindings.reserve(numOutputs);
766
767 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000768 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100769 const unsigned int outputId = graph->outputIds()->Get(i);
770 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
771 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000772
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100773 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
774 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
775 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000776
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100777 IConnectableLayer* outputLayer =
778 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000779
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100780 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
781
782 unsigned int sourceLayerIndex =
783 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
784 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
785 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
786
Derek Lamberti8ddae332019-02-21 16:29:43 +0000787 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100788 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000789 }
790}
791
// Records every output slot of the created armnn layer in m_GraphConnections,
// keyed by the serialized layer's index() property, so connections can be
// established once all layers exist.
// @throws ParseException if the slot counts in the flatbuffer and the created
//         layer disagree.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
818
// Records every input slot of the created armnn layer in m_GraphConnections,
// keyed by the serialized source layer index of each connection, so
// connections can be established once all layers exist.
// @throws ParseException if the slot counts in the flatbuffer and the created
//         layer disagree.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Each serialized input slot names its producer (sourceLayerIndex)
        // and which of the producer's output slots it consumes.
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}
845
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000846void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
847 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100848 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000849{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100850 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000851 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100852 m_GraphConnections[sourceLayerIndex] = Connections();
853 }
854
855 Connections& connections = m_GraphConnections[sourceLayerIndex];
856 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
857 {
858 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000859 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000860 else
861 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100862 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000863 }
864}
Kevin May43a799c2019-02-08 16:31:42 +0000865
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000866void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100867 uint32_t outputSlotIndex,
868 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000869{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100870 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
871 {
872 m_GraphConnections[sourceLayerIndex] = Connections();
873 }
874
875 Connections& connections = m_GraphConnections[sourceLayerIndex];
876 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
877 {
878 throw ParseException("Same output slot index processed twice");
879 }
880
881 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000882}
883
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100884void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
885{
886 CHECK_LAYERS(graph, 0, layerIndex);
887 auto inputs = GetInputs(graph, layerIndex);
888 CHECK_LOCATION();
889 CHECK_VALID_SIZE(inputs.size(), 1);
890
891 auto outputs = GetOutputs(graph, layerIndex);
892 CHECK_VALID_SIZE(outputs.size(), 1);
893
894 auto layerName = GetLayerName(graph, layerIndex);
895
896 IConnectableLayer* layer = m_Network->AddAbsLayer(layerName.c_str());
897 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
898 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
899
900 RegisterInputSlots(graph, layerIndex, layer);
901 RegisterOutputSlots(graph, layerIndex, layer);
902}
903
// Deserializes an Activation layer: one input, one output, plus a descriptor
// selecting the activation function and its two optional parameters.
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    // Rebuild the armnn descriptor from its flatbuffer counterpart.
    // m_A/m_B are function-dependent (e.g. bounds for bounded ReLU).
    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
931
Derek Lamberti8ddae332019-02-21 16:29:43 +0000932void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000933{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000934 CHECK_LAYERS(graph, 0, layerIndex);
935 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000936 CHECK_LOCATION();
937 CHECK_VALID_SIZE(inputs.size(), 2);
938
Derek Lamberti8ddae332019-02-21 16:29:43 +0000939 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000940 CHECK_VALID_SIZE(outputs.size(), 1);
941
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000942 auto layerName = GetLayerName(graph, layerIndex);
943 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000944
945 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
946 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
947
Derek Lamberti8ddae332019-02-21 16:29:43 +0000948 RegisterInputSlots(graph, layerIndex, layer);
949 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000950}
951
// Deserializes an ArgMinMax layer: one input, one output, plus a descriptor
// selecting min/max mode and the reduction axis.
void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    // Rebuild the armnn descriptor from its flatbuffer counterpart.
    armnn::ArgMinMaxDescriptor descriptor;
    descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
    descriptor.m_Axis = serializerDescriptor->axis();
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
977
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000978void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
979{
980 CHECK_LAYERS(graph, 0, layerIndex);
981
982 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
983 CHECK_VALID_SIZE(inputs.size(), 1);
984
985 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
986 CHECK_VALID_SIZE(outputs.size(), 1);
987
988 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
989 auto flatBufferCrops = flatBufferDescriptor->crops();
990 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
991
992 if (flatBufferCrops->Length() % 2 != 0)
993 {
994 throw ParseException(boost::str(
995 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
996 }
997
998 std::vector<std::pair<unsigned int, unsigned int>> crops;
999 crops.reserve(flatBufferCrops->Length() / 2);
1000 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1001 {
1002 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1003 }
1004
1005 armnn::BatchToSpaceNdDescriptor descriptor;
1006 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1007 descriptor.m_BlockShape =
1008 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1009 descriptor.m_Crops = crops;
1010
1011 auto layerName = GetLayerName(graph, layerIndex);
1012 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1013
1014 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1015 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1016
1017 RegisterInputSlots(graph, layerIndex, layer);
1018 RegisterOutputSlots(graph, layerIndex, layer);
1019}
1020
// Deserializes a BatchNormalization layer: one input, one output, a descriptor
// (epsilon + data layout) and four constant statistic tensors
// (mean, variance, beta, gamma).
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // The four statistic tensors are baked into the layer as constants.
    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1057
// Deserializes a Constant layer: no inputs, one output whose value is the
// embedded constant tensor. Only output slots are registered since the layer
// consumes nothing.
void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}
1080
Derek Lamberti8ddae332019-02-21 16:29:43 +00001081void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001082{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001083 CHECK_LAYERS(graph, 0, layerIndex);
1084 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001085 CHECK_LOCATION();
1086 CHECK_VALID_SIZE(inputs.size(), 1);
1087
Derek Lamberti8ddae332019-02-21 16:29:43 +00001088 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001089 CHECK_VALID_SIZE(outputs.size(), 1);
1090
Derek Lamberti8ddae332019-02-21 16:29:43 +00001091 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001092 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001093 auto serializerDescriptor = serializerLayer->descriptor();
1094
1095 armnn::Convolution2dDescriptor descriptor;
1096 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1097 descriptor.m_PadRight = serializerDescriptor->padRight();
1098 descriptor.m_PadTop = serializerDescriptor->padTop();
1099 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1100 descriptor.m_StrideX = serializerDescriptor->strideX();
1101 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001102 descriptor.m_DilationX = serializerDescriptor->dilationX();
1103 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001104 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1105 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1106
1107 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1108 armnn::ConstTensor biases;
1109
Matteo Martincighfc598e12019-05-14 10:36:13 +01001110 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001111 if (descriptor.m_BiasEnabled)
1112 {
1113 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001114 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001115 }
1116 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1117 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001118 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001119 layerName.c_str());
1120 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1121 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1122
Derek Lamberti8ddae332019-02-21 16:29:43 +00001123 RegisterInputSlots(graph, layerIndex, layer);
1124 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001125}
1126
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001127void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1128{
1129 CHECK_LAYERS(graph, 0, layerIndex);
1130
1131 auto inputs = GetInputs(graph, layerIndex);
1132 CHECK_VALID_SIZE(inputs.size(), 1);
1133
1134 auto outputs = GetOutputs(graph, layerIndex);
1135 CHECK_VALID_SIZE(outputs.size(), 1);
1136
1137 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1138
1139 armnn::DepthToSpaceDescriptor descriptor;
1140 descriptor.m_BlockSize = fbDescriptor->blockSize();
1141 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1142
1143 auto layerName = GetLayerName(graph, layerIndex);
1144 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1145
1146 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1147 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1148
1149 RegisterInputSlots(graph, layerIndex, layer);
1150 RegisterOutputSlots(graph, layerIndex, layer);
1151}
1152
Derek Lamberti8ddae332019-02-21 16:29:43 +00001153void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001154{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001155 CHECK_LAYERS(graph, 0, layerIndex);
1156 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001157 CHECK_LOCATION();
1158 CHECK_VALID_SIZE(inputs.size(), 1);
1159
Derek Lamberti8ddae332019-02-21 16:29:43 +00001160 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001161 CHECK_VALID_SIZE(outputs.size(), 1);
1162
Derek Lamberti8ddae332019-02-21 16:29:43 +00001163 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001164 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001165 auto serializerDescriptor = serializerLayer->descriptor();
1166
1167 armnn::DepthwiseConvolution2dDescriptor descriptor;
1168 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1169 descriptor.m_PadRight = serializerDescriptor->padRight();
1170 descriptor.m_PadTop = serializerDescriptor->padTop();
1171 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1172 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001173 descriptor.m_StrideY = serializerDescriptor->strideY();
1174 descriptor.m_DilationX = serializerDescriptor->dilationX();
1175 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001176 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1177 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1178
1179 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1180 armnn::ConstTensor biases;
1181
Matteo Martincighfc598e12019-05-14 10:36:13 +01001182 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001183 if (descriptor.m_BiasEnabled)
1184 {
1185 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001186 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001187 }
1188 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1189 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001190 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001191 layerName.c_str());
1192
1193 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1194 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1195
Derek Lamberti8ddae332019-02-21 16:29:43 +00001196 RegisterInputSlots(graph, layerIndex, layer);
1197 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001198}
1199
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001200void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1201{
1202 CHECK_LAYERS(graph, 0, layerIndex);
1203 auto inputs = GetInputs(graph, layerIndex);
1204 CHECK_LOCATION();
1205 CHECK_VALID_SIZE(inputs.size(), 2);
1206
1207 auto outputs = GetOutputs(graph, layerIndex);
1208 CHECK_VALID_SIZE(outputs.size(), 4);
1209
1210 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1211 auto layerName = GetLayerName(graph, layerIndex);
1212 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1213
1214 armnn::DetectionPostProcessDescriptor descriptor;
1215 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1216 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1217 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1218 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1219 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1220 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1221 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1222 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1223 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1224 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1225 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1226
1227 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1228
1229 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1230 anchors,
1231 layerName.c_str());
1232
1233 for (unsigned int i = 0; i < 4; i++)
1234 {
1235 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1236 }
1237
1238 RegisterInputSlots(graph, layerIndex, layer);
1239 RegisterOutputSlots(graph, layerIndex, layer);
1240}
1241
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001242void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1243{
1244 CHECK_LAYERS(graph, 0, layerIndex);
1245 auto inputs = GetInputs(graph, layerIndex);
1246 CHECK_LOCATION();
1247 CHECK_VALID_SIZE(inputs.size(), 2);
1248
1249 auto outputs = GetOutputs(graph, layerIndex);
1250 CHECK_VALID_SIZE(outputs.size(), 1);
1251
1252 auto layerName = GetLayerName(graph, layerIndex);
1253 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1254
1255 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1256 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1257
1258 RegisterInputSlots(graph, layerIndex, layer);
1259 RegisterOutputSlots(graph, layerIndex, layer);
1260}
1261
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001262void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1263{
1264 CHECK_LAYERS(graph, 0, layerIndex);
1265 auto inputs = GetInputs(graph, layerIndex);
1266 CHECK_LOCATION();
1267 CHECK_VALID_SIZE(inputs.size(), 2);
1268
1269 auto outputs = GetOutputs(graph, layerIndex);
1270 CHECK_VALID_SIZE(outputs.size(), 1);
1271
1272 auto layerName = GetLayerName(graph, layerIndex);
1273 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1274
1275 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1276 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1277
1278 RegisterInputSlots(graph, layerIndex, layer);
1279 RegisterOutputSlots(graph, layerIndex, layer);
1280}
1281
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001282void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1283{
1284 CHECK_LAYERS(graph, 0, layerIndex);
1285 auto inputs = GetInputs(graph, layerIndex);
1286 CHECK_LOCATION();
1287 CHECK_VALID_SIZE(inputs.size(), 2);
1288
1289 auto outputs = GetOutputs(graph, layerIndex);
1290 CHECK_VALID_SIZE(outputs.size(), 1);
1291
1292 auto layerName = GetLayerName(graph, layerIndex);
1293 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1294
1295 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1296 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1297
1298 RegisterInputSlots(graph, layerIndex, layer);
1299 RegisterOutputSlots(graph, layerIndex, layer);
1300}
1301
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001302void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1303{
1304 CHECK_LAYERS(graph, 0, layerIndex);
1305
1306 auto inputs = GetInputs(graph, layerIndex);
1307 CHECK_VALID_SIZE(inputs.size(), 1);
1308
1309 auto outputs = GetOutputs(graph, layerIndex);
1310 CHECK_VALID_SIZE(outputs.size(), 1);
1311
1312 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1313 auto fbDescriptor = fbLayer->descriptor();
1314
1315 armnn::InstanceNormalizationDescriptor descriptor;
1316 descriptor.m_Gamma = fbDescriptor->gamma();
1317 descriptor.m_Beta = fbDescriptor->beta();
1318 descriptor.m_Eps = fbDescriptor->eps();
1319 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1320
1321 const std::string layerName = GetLayerName(graph, layerIndex);
1322 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1323
1324 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1325 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1326
1327 RegisterInputSlots(graph, layerIndex, layer);
1328 RegisterOutputSlots(graph, layerIndex, layer);
1329}
1330
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001331void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1332{
1333 CHECK_LAYERS(graph, 0, layerIndex);
1334
1335 auto inputs = GetInputs(graph, layerIndex);
1336 CHECK_VALID_SIZE(inputs.size(), 1);
1337
1338 auto outputs = GetOutputs(graph, layerIndex);
1339 CHECK_VALID_SIZE(outputs.size(), 1);
1340 auto outputInfo = ToTensorInfo(outputs[0]);
1341
1342 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1343 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1344
1345 auto layerName = GetLayerName(graph, layerIndex);
1346 armnn::L2NormalizationDescriptor descriptor;
1347 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001348 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001349
1350 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1351 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1352
1353 RegisterInputSlots(graph, layerIndex, layer);
1354 RegisterOutputSlots(graph, layerIndex, layer);
1355}
1356
Sadik Armagan26257852019-10-14 13:00:47 +01001357void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1358{
1359 CHECK_LAYERS(graph, 0, layerIndex);
1360
1361 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1362 CHECK_VALID_SIZE(inputs.size(), 1);
1363
1364 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1365 CHECK_VALID_SIZE(outputs.size(), 1);
1366
1367 armnn::LogSoftmaxDescriptor descriptor;
1368 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1369 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1370 auto layerName = GetLayerName(graph, layerIndex);
1371
1372 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1373
1374 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1375 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1376
1377 RegisterInputSlots(graph, layerIndex, layer);
1378 RegisterOutputSlots(graph, layerIndex, layer);
1379}
1380
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001381void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1382{
1383 CHECK_LAYERS(graph, 0, layerIndex);
1384 auto inputs = GetInputs(graph, layerIndex);
1385 CHECK_LOCATION();
1386 CHECK_VALID_SIZE(inputs.size(), 2);
1387
1388 auto outputs = GetOutputs(graph, layerIndex);
1389 CHECK_VALID_SIZE(outputs.size(), 1);
1390
1391 auto layerName = GetLayerName(graph, layerIndex);
1392 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1393
1394 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1395 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1396
1397 RegisterInputSlots(graph, layerIndex, layer);
1398 RegisterOutputSlots(graph, layerIndex, layer);
1399}
1400
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001401void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1402{
1403 CHECK_LAYERS(graph, 0, layerIndex);
1404 auto inputs = GetInputs(graph, layerIndex);
1405 CHECK_LOCATION();
1406 CHECK_VALID_SIZE(inputs.size(), 2);
1407
1408 auto outputs = GetOutputs(graph, layerIndex);
1409 CHECK_VALID_SIZE(outputs.size(), 1);
1410
1411 auto layerName = GetLayerName(graph, layerIndex);
1412 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1413
1414 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1415 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1416
1417 RegisterInputSlots(graph, layerIndex, layer);
1418 RegisterOutputSlots(graph, layerIndex, layer);
1419}
1420
Jim Flynne242f2d2019-05-22 14:24:13 +01001421const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1422 unsigned int layerIndex)
1423{
1424 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1425
1426 switch (layerType)
1427 {
1428 case Layer::Layer_ConcatLayer:
1429 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1430 case Layer::Layer_MergerLayer:
1431 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1432 default:
1433 throw armnn::Exception("unknown layer type, should be concat or merger");
1434 }
1435}
1436
Jim Flynn906f9462019-05-10 13:55:21 +01001437void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001438{
1439 CHECK_LAYERS(graph, 0, layerIndex);
1440 CHECK_LOCATION();
1441
1442 auto outputs = GetOutputs(graph, layerIndex);
1443 CHECK_VALID_SIZE(outputs.size(), 1);
1444
Jim Flynnac25a1b2019-02-28 10:40:49 +00001445 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001446 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1447 unsigned int numViews = originsDescriptor->numViews();
1448 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001449
1450 // can now check the number of inputs == number of views
1451 auto inputs = GetInputs(graph, layerIndex);
1452 CHECK_VALID_SIZE(inputs.size(), numViews);
1453
1454 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001455 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001456 for (unsigned int v = 0; v < numViews; ++v)
1457 {
1458 auto originPtr = originsPtr->Get(v);
1459 for (unsigned int d = 0; d < numDimensions; ++d)
1460 {
1461 uint32_t value = originPtr->data()->Get(d);
1462 descriptor.SetViewOriginCoord(v, d, value);
1463 }
1464 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001465 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001466
Jim Flynn906f9462019-05-10 13:55:21 +01001467 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001468 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1469 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1470
1471 RegisterInputSlots(graph, layerIndex, layer);
1472 RegisterOutputSlots(graph, layerIndex, layer);
1473}
1474
Derek Lamberti8ddae332019-02-21 16:29:43 +00001475void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001476{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001477 CHECK_LAYERS(graph, 0, layerIndex);
1478 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001479 CHECK_LOCATION();
1480 CHECK_VALID_SIZE(inputs.size(), 2);
1481
Derek Lamberti8ddae332019-02-21 16:29:43 +00001482 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001483 CHECK_VALID_SIZE(outputs.size(), 1);
1484
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001485 auto layerName = GetLayerName(graph, layerIndex);
1486 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001487
1488 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1489 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1490
Derek Lamberti8ddae332019-02-21 16:29:43 +00001491 RegisterInputSlots(graph, layerIndex, layer);
1492 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001493}
1494
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001495void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1496{
1497 CHECK_LAYERS(graph, 0, layerIndex);
1498 CHECK_LOCATION();
1499
1500 auto inputs = GetInputs(graph, layerIndex);
1501 CHECK_VALID_SIZE(inputs.size(), 1);
1502
1503 auto outputs = GetOutputs(graph, layerIndex);
1504 CHECK_VALID_SIZE(outputs.size(), 1);
1505
1506 auto layerName = GetLayerName(graph, layerIndex);
1507
1508 armnn::IConnectableLayer* layer;
1509
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001510 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001511
1512 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1513 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1514
1515 RegisterInputSlots(graph, layerIndex, layer);
1516 RegisterOutputSlots(graph, layerIndex, layer);
1517}
1518
Derek Lamberti8ddae332019-02-21 16:29:43 +00001519void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001520{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001521 CHECK_LAYERS(graph, 0, layerIndex);
1522 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001523 CHECK_LOCATION();
1524 CHECK_VALID_SIZE(inputs.size(), 1);
1525
Derek Lamberti8ddae332019-02-21 16:29:43 +00001526 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001527 CHECK_VALID_SIZE(outputs.size(), 1);
1528
Derek Lamberti8ddae332019-02-21 16:29:43 +00001529 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001530 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001531 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1532
1533 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1534 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1535 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1536
1537 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1538
1539 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001540 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001541 if (flatBufferDescriptor->biasEnabled())
1542 {
1543 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001544 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001545 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001546 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1547 weightsTensor,
1548 optionalBiases,
1549 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001550
1551 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1552 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1553
Derek Lamberti8ddae332019-02-21 16:29:43 +00001554 RegisterInputSlots(graph, layerIndex, layer);
1555 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001556}
1557
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001558void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1559{
1560 CHECK_LAYERS(graph, 0, layerIndex);
1561
1562 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1563 CHECK_VALID_SIZE(inputs.size(), 1);
1564
1565 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1566 CHECK_VALID_SIZE(outputs.size(), 1);
1567
1568 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1569 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001570 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001571
1572 if (flatBufferPadList->Length() % 2 != 0)
1573 {
1574 throw ParseException(boost::str(
1575 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1576 }
1577
1578 std::vector<std::pair<unsigned int, unsigned int>> padList;
1579 padList.reserve(flatBufferPadList->Length() / 2);
1580 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1581 {
1582 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1583 }
1584
David Monahan34757812019-06-19 11:47:21 +01001585 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001586
1587 auto layerName = GetLayerName(graph, layerIndex);
1588 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1589
1590 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1591 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1592
1593 RegisterInputSlots(graph, layerIndex, layer);
1594 RegisterOutputSlots(graph, layerIndex, layer);
1595}
1596
Derek Lamberti8ddae332019-02-21 16:29:43 +00001597void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001598{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001599 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001600
1601 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001602 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001603
Derek Lamberti8ddae332019-02-21 16:29:43 +00001604 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001605 CHECK_VALID_SIZE(inputs.size(), 1);
1606
Derek Lamberti8ddae332019-02-21 16:29:43 +00001607 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001608 CHECK_VALID_SIZE(outputs.size(), 1);
1609 auto outputInfo = ToTensorInfo(outputs[0]);
1610
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001611 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001612 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1613
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001614 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001615 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1616
Derek Lamberti8ddae332019-02-21 16:29:43 +00001617 RegisterInputSlots(graph, layerIndex, layer);
1618 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001619}
1620
// Translates a serialized (flatbuffer) pooling descriptor into the armnn
// runtime Pooling2dDescriptor. layerIndex is accepted for interface
// consistency with the other descriptor helpers but is not read here.
// NOTE(review): each 'default' branch asserts only in debug builds; in
// release builds the corresponding descriptor field silently keeps its
// default-constructed value.
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    // Map the serialized pooling algorithm onto the armnn enum.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Map the output-shape rounding mode.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Map the padding method (whether padded values are excluded from, or
    // included in, the pooling computation).
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Map the tensor data layout.
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // The remaining fields are plain scalar copies.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
1709
Derek Lamberti8ddae332019-02-21 16:29:43 +00001710void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001711{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001712 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001713
Derek Lamberti8ddae332019-02-21 16:29:43 +00001714 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001715 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001716 CHECK_VALID_SIZE(inputs.size(), 1);
1717
Derek Lamberti8ddae332019-02-21 16:29:43 +00001718 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001719 CHECK_VALID_SIZE(outputs.size(), 1);
1720 auto outputInfo = ToTensorInfo(outputs[0]);
1721
1722 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001723 auto layerName = GetLayerName(graph, layerIndex);
1724 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001725 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1726
Derek Lamberti8ddae332019-02-21 16:29:43 +00001727 RegisterInputSlots(graph, layerIndex, layer);
1728 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001729}
1730
Derek Lamberti87acb272019-03-27 16:51:31 +00001731void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1732{
1733 CHECK_LAYERS(graph, 0, layerIndex);
1734
1735 auto inputs = GetInputs(graph, layerIndex);
1736 CHECK_VALID_SIZE(inputs.size(), 1);
1737
1738 auto outputs = GetOutputs(graph, layerIndex);
1739 CHECK_VALID_SIZE(outputs.size(), 1);
1740 auto outputInfo = ToTensorInfo(outputs[0]);
1741
1742 auto layerName = GetLayerName(graph, layerIndex);
1743 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1744 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1745
1746 RegisterInputSlots(graph, layerIndex, layer);
1747 RegisterOutputSlots(graph, layerIndex, layer);
1748}
1749
// Computes the output TensorInfo of a Reshape from the serialized target
// dimensions. One dimension may be serialized as -1 (stored wrapped-around in
// the unsigned vector); its size is then inferred from the input's total
// element count, mirroring the TensorFlow/TfLite "stretch" convention.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // -1 converts to the same uint32_t value the serializer stored, so this
    // locates the stretch dimension if one is present.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Only a single stretch dimension is permitted.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Accumulate over ALL components with an initial value of -1: the -1
        // sentinel still in the range cancels the initial -1, leaving the
        // (positive) product of the explicit dimensions only.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // The stretch dimension is whatever remains after dividing out the
        // explicit dimensions from the input's element count.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy the input info (data type, quantization parameters) and replace
    // only the shape.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1779
// Deserializes a Reshape layer. The output shape is recomputed from the
// serialized target dimensions (which may contain one -1 "stretch"
// component, resolved by OutputShapeOfReshape) and compared against the
// serialized output tensor's dimensions.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve a possible -1 stretch dimension against the input element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape-consistency check only runs when more than one
    // input is present; with a single input the computed shape is accepted
    // as-is. Confirm this guard is intentional.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer advertises the recomputed shape, not the serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1822
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001823void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1824{
1825 CHECK_LAYERS(graph, 0, layerIndex);
1826
1827 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1828 CHECK_VALID_SIZE(inputs.size(), 1);
1829
1830 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1831 CHECK_VALID_SIZE(outputs.size(), 1);
1832
1833 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1834
1835 armnn::ResizeDescriptor descriptor;
1836 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1837 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1838 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1839 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1840
1841 auto layerName = GetLayerName(graph, layerIndex);
1842 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1843
1844 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1845 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1846
1847 RegisterInputSlots(graph, layerIndex, layer);
1848 RegisterOutputSlots(graph, layerIndex, layer);
1849}
1850
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001851void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1852{
1853 CHECK_LAYERS(graph, 0, layerIndex);
1854
1855 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1856 CHECK_VALID_SIZE(inputs.size(), 1);
1857
1858 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1859 CHECK_VALID_SIZE(outputs.size(), 1);
1860
1861 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1862
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001863 armnn::ResizeDescriptor descriptor;
1864 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001865 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001866 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
1867 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001868
1869 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001870 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001871
1872 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1873 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1874
1875 RegisterInputSlots(graph, layerIndex, layer);
1876 RegisterOutputSlots(graph, layerIndex, layer);
1877}
1878
Derek Lamberti8ddae332019-02-21 16:29:43 +00001879void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001880{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001881 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001882
Derek Lamberti8ddae332019-02-21 16:29:43 +00001883 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001884 CHECK_VALID_SIZE(inputs.size(), 1);
1885
Derek Lamberti8ddae332019-02-21 16:29:43 +00001886 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001887 CHECK_VALID_SIZE(outputs.size(), 1);
1888
1889 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001890 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001891 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001892
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001893 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1894
1895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1897
Derek Lamberti8ddae332019-02-21 16:29:43 +00001898 RegisterInputSlots(graph, layerIndex, layer);
1899 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001900}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001901
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001902void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1903{
1904 CHECK_LAYERS(graph, 0, layerIndex);
1905
1906 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1907 CHECK_VALID_SIZE(inputs.size(), 1);
1908
1909 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1910 CHECK_VALID_SIZE(outputs.size(), 1);
1911
1912 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1913 auto flatBufferPadList = flatBufferDescriptor->padList();
1914 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1915
1916 if (flatBufferPadList->Length() % 2 != 0)
1917 {
1918 throw ParseException(boost::str(
1919 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1920 }
1921
1922 std::vector<std::pair<unsigned int, unsigned int>> padList;
1923 padList.reserve(flatBufferPadList->Length() / 2);
1924 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1925 {
1926 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1927 }
1928
1929 armnn::SpaceToBatchNdDescriptor descriptor;
1930 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1931 descriptor.m_BlockShape =
1932 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1933 descriptor.m_PadList = padList;
1934
1935 auto layerName = GetLayerName(graph, layerIndex);
1936 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1937
1938 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1939 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1940
1941 RegisterInputSlots(graph, layerIndex, layer);
1942 RegisterOutputSlots(graph, layerIndex, layer);
1943}
1944
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001945void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1946{
1947 CHECK_LAYERS(graph, 0, layerIndex);
1948
1949 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1950 CHECK_VALID_SIZE(inputs.size(), 1);
1951
1952 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1953 CHECK_VALID_SIZE(outputs.size(), 1);
1954
1955 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1956
1957 armnn::SpaceToDepthDescriptor descriptor;
1958 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1959 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1960
1961 auto layerName = GetLayerName(graph, layerIndex);
1962 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1963
1964 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1965 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1966
1967 RegisterInputSlots(graph, layerIndex, layer);
1968 RegisterOutputSlots(graph, layerIndex, layer);
1969}
1970
/// Translates a serialized normalization descriptor into an
/// armnn::NormalizationDescriptor.
///
/// @param normalizationDescriptor Flatbuffer descriptor read from the graph.
/// @param layerIndex              Index of the layer being parsed (currently unused
///                                in the body; kept for interface consistency).
/// @return Populated armnn::NormalizationDescriptor.
///
/// NOTE(review): unknown enum values only trigger BOOST_ASSERT_MSG, which is a
/// no-op in release builds — the corresponding descriptor field is then left
/// at its default value rather than causing a parse error.
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    // Map the serialized channel type (Across / Within) onto the armnn enum.
    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    // Map the serialized method type (LocalBrightness / LocalContrast).
    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    // Map the serialized data layout (NCHW / NHWC).
    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar parameters are copied through unchanged.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
2038
2039void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2040{
2041 CHECK_LAYERS(graph, 0, layerIndex);
2042
2043 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2044
2045 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2046 CHECK_VALID_SIZE(inputs.size(), 1);
2047
2048 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2049 CHECK_VALID_SIZE(outputs.size(), 1);
2050
2051 auto outputInfo = ToTensorInfo(outputs[0]);
2052
2053 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2054 auto layerName = GetLayerName(graph, layerIndex);
2055
2056 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2057 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2058
2059 RegisterInputSlots(graph, layerIndex, layer);
2060 RegisterOutputSlots(graph, layerIndex, layer);
2061}
2062
Sadik Armagan8b42a382019-03-01 14:24:49 +00002063void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2064{
2065 CHECK_LAYERS(graph, 0, layerIndex);
2066 auto inputs = GetInputs(graph, layerIndex);
2067 CHECK_LOCATION();
2068 CHECK_VALID_SIZE(inputs.size(), 1);
2069
2070 auto outputs = GetOutputs(graph, layerIndex);
2071 CHECK_VALID_SIZE(outputs.size(), 1);
2072
2073 auto layerName = GetLayerName(graph, layerIndex);
2074 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
2075
2076 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2077 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2078
2079 RegisterInputSlots(graph, layerIndex, layer);
2080 RegisterOutputSlots(graph, layerIndex, layer);
2081}
2082
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002083void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2084{
2085 CHECK_LAYERS(graph, 0, layerIndex);
2086
2087 auto inputs = GetInputs(graph, layerIndex);
2088 CHECK_VALID_SIZE(inputs.size(), 1);
2089
2090 auto outputs = GetOutputs(graph, layerIndex);
2091 CHECK_VALID_SIZE(outputs.size(), 1);
2092
2093 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2094
2095 auto fbBegin = fbDescriptor->begin();
2096 auto fbSize = fbDescriptor->size();
2097
2098 if (fbBegin->Length() != fbSize->Length())
2099 {
2100 throw ParseException(boost::str(
2101 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2102 }
2103
2104 armnn::SliceDescriptor descriptor;
2105 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2106 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2107
2108 auto layerName = GetLayerName(graph, layerIndex);
2109 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2110
2111 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2112 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2113
2114 RegisterInputSlots(graph, layerIndex, layer);
2115 RegisterOutputSlots(graph, layerIndex, layer);
2116}
2117
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002118void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2119{
2120 CHECK_LAYERS(graph, 0, layerIndex);
2121
2122 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2123 CHECK_VALID_SIZE(inputs.size(), 1);
2124
2125 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2126 CHECK_VALID_SIZE(outputs.size(), 1);
2127
2128 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2129
2130 auto flatBufferBegin = flatBufferDescriptor->begin();
2131 auto flatBufferEnd = flatBufferDescriptor->end();
2132 auto flatBufferStride = flatBufferDescriptor->stride();
2133
2134 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2135 flatBufferBegin->Length() == flatBufferStride->Length()))
2136 {
2137 throw ParseException(boost::str(
2138 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2139 }
2140
2141 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2142 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2143 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2144
2145 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2146 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2147 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2148 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2149 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2150 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2151 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2152
2153 auto layerName = GetLayerName(graph, layerIndex);
2154 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2155
2156 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2157 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2158
2159 RegisterInputSlots(graph, layerIndex, layer);
2160 RegisterOutputSlots(graph, layerIndex, layer);
2161}
2162
Conor Kennedyda1f9752019-03-01 14:37:12 +00002163void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2164{
2165 CHECK_LAYERS(graph, 0, layerIndex);
2166 auto inputs = GetInputs(graph, layerIndex);
2167 CHECK_LOCATION();
2168 CHECK_VALID_SIZE(inputs.size(), 2);
2169
2170 auto outputs = GetOutputs(graph, layerIndex);
2171 CHECK_VALID_SIZE(outputs.size(), 1);
2172
2173 auto layerName = GetLayerName(graph, layerIndex);
2174 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2175
2176 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2177 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2178
2179 RegisterInputSlots(graph, layerIndex, layer);
2180 RegisterOutputSlots(graph, layerIndex, layer);
2181}
2182
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002183void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2184{
2185 CHECK_LAYERS(graph, 0, layerIndex);
2186
2187 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2188 CHECK_VALID_SIZE(inputs.size(), 2);
2189
2190 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2191 CHECK_VALID_SIZE(outputs.size(), 1);
2192
2193 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002194 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2195
2196 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002197 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2198
2199 RegisterInputSlots(graph, layerIndex, layer);
2200 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002201}
2202
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002203void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2204{
2205 CHECK_LAYERS(graph, 0, layerIndex);
2206
2207 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2208 CHECK_VALID_SIZE(inputs.size(), 1);
2209
2210 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2211 CHECK_VALID_SIZE(outputs.size(), 1);
2212
2213 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2214 auto flatBufferAxis = flatBufferDescriptor->axis();
2215 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2216
2217 armnn::MeanDescriptor descriptor;
2218 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2219 descriptor.m_KeepDims = flatBufferKeepDims;
2220
2221 auto layerName = GetLayerName(graph, layerIndex);
2222 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2223
2224 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2225 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2226
2227 RegisterInputSlots(graph, layerIndex, layer);
2228 RegisterOutputSlots(graph, layerIndex, layer);
2229}
2230
/// Deserializes a Splitter layer.
///
/// Rebuilds the ViewsDescriptor (one view per output) from the serialized
/// view sizes and origins, then sets one output tensor info per view.
/// NOTE(review): the number of outputs is not validated against numViews
/// before indexing outputs[vIdx] below — assumed consistent with the
/// serialized graph; confirm upstream validation.
void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check numViews and numDimensions corresponds to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();

    // Copy every (view, dimension) size and origin coordinate into the
    // armnn ViewsDescriptor.
    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // I could have as many outputs as views ...
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2274
Jim Flynn11af3752019-03-19 17:22:29 +00002275armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2276{
2277 armnn::LstmDescriptor desc;
2278
2279 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2280 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2281 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2282 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2283 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2284 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002285 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002286
2287 return desc;
2288}
2289
/// Deserializes an LSTM layer.
///
/// Rebuilds the LstmDescriptor and LstmInputParams from the flatbuffer, adding
/// the optional weight/bias groups (CIFG, projection, peephole, layer norm)
/// only when the corresponding descriptor flag enables them.
///
/// NOTE: lstmInputParams stores raw pointers into the local ConstTensor
/// variables declared below, so every ConstTensor must stay alive until
/// AddLstmLayer() is called — the declaration order here is load-bearing.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs: input, outputStateIn, cellStateIn (per the CHECK below).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // 4 outputs (per the CHECK below).
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory weights and biases — always present regardless of flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate tensors: only read when CIFG (coupled input-forget gate)
    // is disabled, i.e. the input gate actually exists.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection tensors: only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole tensors: only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights; the input-gate norm weights additionally
    // require CIFG to be disabled (no input gate -> no input norm weights).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Restore the tensor info for each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2405
/// Deserializes a QuantizedLstm layer.
///
/// Unlike the float LSTM, all twelve weight/bias tensors are mandatory
/// (no CIFG/projection/peephole variants), so they are read unconditionally.
///
/// NOTE: lstmInputParams holds raw pointers into the local ConstTensor
/// variables below; they must stay alive until AddQuantizedLstmLayer() runs.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs (per the CHECK below).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // 2 outputs (per the CHECK below).
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Restore the tensor info for both output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2459
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002460void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2461{
2462 CHECK_LAYERS(graph, 0, layerIndex);
2463
2464 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2465 CHECK_VALID_SIZE(inputs.size(), 1);
2466
2467 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2468 CHECK_VALID_SIZE(outputs.size(), 1);
2469
2470 const std::string layerName = GetLayerName(graph, layerIndex);
2471 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2472
2473 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2474 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2475
2476 RegisterInputSlots(graph, layerIndex, layer);
2477 RegisterOutputSlots(graph, layerIndex, layer);
2478}
2479
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002480void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2481{
2482 CHECK_LAYERS(graph, 0, layerIndex);
2483
2484 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2485 CHECK_VALID_SIZE(inputs.size(), 2);
2486
2487 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2488 CHECK_VALID_SIZE(outputs.size(), 1);
2489
2490 const std::string layerName = GetLayerName(graph, layerIndex);
2491 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2492
2493 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2494 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2495
2496 RegisterInputSlots(graph, layerIndex, layer);
2497 RegisterOutputSlots(graph, layerIndex, layer);
2498}
2499
Sadik Armaganeff363d2019-04-05 15:25:46 +01002500void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2501{
2502 CHECK_LAYERS(graph, 0, layerIndex);
2503 auto inputs = GetInputs(graph, layerIndex);
2504 CHECK_LOCATION();
2505 CHECK_VALID_SIZE(inputs.size(), 2);
2506
2507 auto outputs = GetOutputs(graph, layerIndex);
2508 CHECK_VALID_SIZE(outputs.size(), 2);
2509
2510 auto layerName = GetLayerName(graph, layerIndex);
2511 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2512
2513 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2514 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2515
2516 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2517 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2518
2519 RegisterInputSlots(graph, layerIndex, layer);
2520 RegisterOutputSlots(graph, layerIndex, layer);
2521}
2522
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002523void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2524{
2525 CHECK_LAYERS(graph, 0, layerIndex);
2526 auto inputs = GetInputs(graph, layerIndex);
2527 CHECK_LOCATION();
2528 CHECK_VALID_SIZE(inputs.size(), 2);
2529
2530 auto outputs = GetOutputs(graph, layerIndex);
2531 CHECK_VALID_SIZE(outputs.size(), 1);
2532
2533 auto layerName = GetLayerName(graph, layerIndex);
2534 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2535
2536 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2537 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2538
2539 RegisterInputSlots(graph, layerIndex, layer);
2540 RegisterOutputSlots(graph, layerIndex, layer);
2541}
2542
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002543void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2544{
2545 CHECK_LAYERS(graph, 0, layerIndex);
2546
2547 auto inputs = GetInputs(graph, layerIndex);
2548 CHECK_VALID_SIZE(inputs.size(), 1);
2549
2550 auto outputs = GetOutputs(graph, layerIndex);
2551 CHECK_VALID_SIZE(outputs.size(), 1);
2552
2553 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2554 auto layerName = GetLayerName(graph, layerIndex);
2555 auto serializerDescriptor = serializerLayer->descriptor();
2556
2557 armnn::TransposeConvolution2dDescriptor descriptor;
2558 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2559 descriptor.m_PadRight = serializerDescriptor->padRight();
2560 descriptor.m_PadTop = serializerDescriptor->padTop();
2561 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2562 descriptor.m_StrideX = serializerDescriptor->strideX();
2563 descriptor.m_StrideY = serializerDescriptor->strideY();;
2564 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2565 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2566
2567 // weights & biases
2568 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2569 armnn::Optional<armnn::ConstTensor> optionalBiases;
2570 if (descriptor.m_BiasEnabled)
2571 {
2572 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2573 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2574 }
2575
2576 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2577 weights,
2578 optionalBiases,
2579 layerName.c_str());
2580
2581 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2582 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2583
2584 RegisterInputSlots(graph, layerIndex, layer);
2585 RegisterOutputSlots(graph, layerIndex, layer);
2586}
2587
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002588void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2589{
2590 CHECK_LAYERS(graph, 0, layerIndex);
2591 auto inputs = GetInputs(graph, layerIndex);
2592
2593 auto outputs = GetOutputs(graph, layerIndex);
2594 CHECK_VALID_SIZE(outputs.size(), 1);
2595
2596 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2597 unsigned int axis = flatBufferDescriptor->axis();
2598 unsigned int numInputs = flatBufferDescriptor->numInputs();
2599 CHECK_VALID_SIZE(inputs.size(), numInputs);
2600
2601 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2602 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2603 flatBufferInputShape->begin() + flatBufferInputShape->size());
2604
2605 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2606 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2607
2608 for (unsigned int i=0; i<inputs.size(); ++i)
2609 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01002610 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002611 if (descriptor.m_InputShape != inputShape)
2612 {
2613 std::stringstream ss;
2614 ss << "Shape of input "
2615 << i
2616 << " "
2617 << inputShape
2618 << " does not equal defined input shape "
2619 << descriptor.m_InputShape
2620 << ": "
2621 << CHECK_LOCATION().AsString();
2622 throw ParseException(ss.str());
2623 }
2624 }
2625
2626 auto layerName = GetLayerName(graph, layerIndex);
2627 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2628
2629 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2630 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2631
2632 RegisterInputSlots(graph, layerIndex, layer);
2633 RegisterOutputSlots(graph, layerIndex, layer);
2634}
2635
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002636} // namespace armnnDeserializer