blob: f8ec2e79cf3e84f6553a18036b41c9efb5a0767d [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
Kevin May43a799c2019-02-08 16:31:42 +0000147#define CHECK_TENSOR_PTR(TENSOR_PTR) \
148 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
149
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000150#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
151 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
152
Mike Kellya0766c32019-02-19 17:22:07 +0000153#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
154 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
155
Kevin May43a799c2019-02-08 16:31:42 +0000156#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
157 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
158
159#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
160 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000182Deserializer::Deserializer()
Kevin May43a799c2019-02-08 16:31:42 +0000183: m_Network(nullptr, nullptr),
184//May require LayerType_Max to be included
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000185m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000186{
187 // register supported layers
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100188 m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
Mike Kellyaf484012019-02-20 16:53:11 +0000189 m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000190 m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100191 m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000192 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
ruoyan018e7fa232019-02-28 15:09:07 +0000193 m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
Jim Flynne242f2d2019-05-22 14:24:13 +0100194 m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
Conor Kennedy76277882019-02-26 08:29:54 +0000195 m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000196 m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
197 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000198 m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000199 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000200 m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000201 m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000202 m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000203 m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000204 m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000205 m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000206 m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
Jim Flynn11af3752019-03-19 17:22:29 +0000207 m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000208 m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000209 m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
210 m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100211 m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
Jim Flynn906f9462019-05-10 13:55:21 +0100212 m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000213 m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
Nina Drozd57728782019-02-27 10:53:27 +0000214 m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000215 m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000216 m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000217 m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100218 m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
Derek Lamberti87acb272019-03-27 16:51:31 +0000219 m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
Jan Eilers5b01a892019-07-23 09:47:43 +0100220 m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000221 m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000222 m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100223 m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
Sadik Armagan8b42a382019-03-01 14:24:49 +0000224 m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000225 m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000226 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100227 m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
Jim Flynn18ce3382019-03-08 11:08:30 +0000228 m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100229 m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000230 m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
Conor Kennedyda1f9752019-03-01 14:37:12 +0000231 m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
Sadik Armaganeff363d2019-04-05 15:25:46 +0100232 m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100233 m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
Kevin May43a799c2019-02-08 16:31:42 +0000234}
235
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000236Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000237{
238 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
239
240 switch(layerType)
241 {
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100242 case Layer::Layer_AbsLayer:
243 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
Mike Kellyaf484012019-02-20 16:53:11 +0000244 case Layer::Layer_ActivationLayer:
245 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000246 case Layer::Layer_AdditionLayer:
247 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100248 case Layer::Layer_ArgMinMaxLayer:
249 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000250 case Layer::Layer_BatchToSpaceNdLayer:
251 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000252 case Layer::Layer_BatchNormalizationLayer:
253 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
Jim Flynne242f2d2019-05-22 14:24:13 +0100254 case Layer::Layer_ConcatLayer:
255 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000256 case Layer::Layer_ConstantLayer:
257 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000258 case Layer::Layer_Convolution2dLayer:
259 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000260 case Layer::Layer_DepthwiseConvolution2dLayer:
261 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000262 case Layer::Layer_DequantizeLayer:
263 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000264 case Layer::Layer_DetectionPostProcessLayer:
265 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000266 case Layer::Layer_DivisionLayer:
267 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000268 case Layer::Layer_EqualLayer:
269 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000270 case Layer::Layer_FullyConnectedLayer:
271 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000272 case Layer::Layer_FloorLayer:
273 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000274 case Layer::Layer_GatherLayer:
275 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000276 case Layer::Layer_GreaterLayer:
277 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000278 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000279 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000280 case Layer::Layer_L2NormalizationLayer:
281 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
Jim Flynn11af3752019-03-19 17:22:29 +0000282 case Layer::Layer_LstmLayer:
283 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000284 case Layer::Layer_MeanLayer:
285 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000286 case Layer::Layer_MinimumLayer:
287 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000288 case Layer::Layer_MaximumLayer:
289 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100290 case Layer::Layer_MergeLayer:
291 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000292 case Layer::Layer_MergerLayer:
293 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000294 case Layer::Layer_MultiplicationLayer:
295 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000296 case Layer::Layer_NormalizationLayer:
297 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000298 case Layer::Layer_OutputLayer:
299 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000300 case Layer::Layer_PadLayer:
301 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000302 case Layer::Layer_PermuteLayer:
303 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000304 case Layer::Layer_Pooling2dLayer:
305 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100306 case Layer::Layer_PreluLayer:
307 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
Derek Lamberti87acb272019-03-27 16:51:31 +0000308 case Layer::Layer_QuantizeLayer:
309 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
Jan Eilers5b01a892019-07-23 09:47:43 +0100310 case Layer::Layer_QuantizedLstmLayer:
311 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000312 case Layer::Layer_ReshapeLayer:
313 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000314 case Layer::Layer_ResizeBilinearLayer:
315 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100316 case Layer::Layer_ResizeLayer:
317 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000318 case Layer::Layer_RsqrtLayer:
319 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000320 case Layer::Layer_SoftmaxLayer:
321 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000322 case Layer::Layer_SpaceToBatchNdLayer:
323 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100324 case Layer::Layer_SpaceToDepthLayer:
325 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000326 case Layer::Layer_SplitterLayer:
327 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100328 case Layer::Layer_StackLayer:
329 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000330 case Layer::Layer_StridedSliceLayer:
331 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000332 case Layer::Layer_SubtractionLayer:
333 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Sadik Armaganeff363d2019-04-05 15:25:46 +0100334 case Layer::Layer_SwitchLayer:
335 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100336 case Layer::Layer_TransposeConvolution2dLayer:
337 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000338 case Layer::Layer_NONE:
339 default:
340 throw ParseException(boost::str(
341 boost::format("Layer must have a type %1%") %
342 Layer::Layer_NONE));
343 }
344}
345
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000346std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
347{
348 auto layer = GetBaseLayer(graph, index);
349 assert(layer);
350 return layer->layerName()->str();
351}
352
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000353int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000354{
355 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
356
357 if (layerType == Layer::Layer_InputLayer)
358 {
359 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
360 }
361 else if ( layerType == Layer::Layer_OutputLayer )
362 {
363 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
364 }
365 return 0;
366}
367
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000368armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000369{
370 switch (dataLayout)
371 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000372 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000373 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000374 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000375 default:
376 return armnn::DataLayout::NCHW;
377 }
378}
379
Mike Kellyaf484012019-02-20 16:53:11 +0000380armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
381{
382 switch (function)
383 {
384 case armnnSerializer::ActivationFunction_Sigmoid:
385 return armnn::ActivationFunction::Sigmoid;
386 case armnnSerializer::ActivationFunction_TanH:
387 return armnn::ActivationFunction::TanH;
388 case armnnSerializer::ActivationFunction_Linear:
389 return armnn::ActivationFunction::Linear;
390 case armnnSerializer::ActivationFunction_ReLu:
391 return armnn::ActivationFunction::ReLu;
392 case armnnSerializer::ActivationFunction_BoundedReLu:
393 return armnn::ActivationFunction::BoundedReLu;
394 case armnnSerializer::ActivationFunction_LeakyReLu:
395 return armnn::ActivationFunction::LeakyReLu;
396 case armnnSerializer::ActivationFunction_Abs:
397 return armnn::ActivationFunction::Abs;
398 case armnnSerializer::ActivationFunction_Sqrt:
399 return armnn::ActivationFunction::Sqrt;
400 case armnnSerializer::ActivationFunction_Square:
401 return armnn::ActivationFunction::Square;
402 default:
403 return armnn::ActivationFunction::Sigmoid;
404 }
405}
406
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100407armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
408{
409 switch (function)
410 {
411 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
412 return armnn::ArgMinMaxFunction::Max;
413 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
414 default:
415 return armnn::ArgMinMaxFunction::Min;
416 }
417}
418
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100419armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
420{
421 switch (method)
422 {
423 case armnnSerializer::ResizeMethod_NearestNeighbor:
424 return armnn::ResizeMethod::NearestNeighbor;
425 case armnnSerializer::ResizeMethod_Bilinear:
426 return armnn::ResizeMethod::NearestNeighbor;
427 default:
428 return armnn::ResizeMethod::NearestNeighbor;
429 }
430}
431
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000432armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000433{
434 armnn::DataType type;
435 CHECK_TENSOR_PTR(tensorPtr);
436
437 switch (tensorPtr->dataType())
438 {
439 case DataType_QuantisedAsymm8:
440 type = armnn::DataType::QuantisedAsymm8;
441 break;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000442 case DataType_QuantisedSymm16:
443 type = armnn::DataType::QuantisedSymm16;
444 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000445 case DataType_Signed32:
446 type = armnn::DataType::Signed32;
447 break;
Kevin May43a799c2019-02-08 16:31:42 +0000448 case DataType_Float32:
449 type = armnn::DataType::Float32;
450 break;
451 case DataType_Float16:
452 type = armnn::DataType::Float16;
453 break;
454 case DataType_Boolean:
455 type = armnn::DataType::Boolean;
456 break;
457 default:
458 {
459 CheckLocation location = CHECK_LOCATION();
460 throw ParseException(
461 boost::str(
462 boost::format("Unsupported data type %1% = %2%. %3%") %
463 tensorPtr->dataType() %
464 EnumNameDataType(tensorPtr->dataType()) %
465 location.AsString()));
466 }
467 }
468 float quantizationScale = tensorPtr->quantizationScale();
469 int32_t quantizationOffset = tensorPtr->quantizationOffset();
470
471 auto dimensions = tensorPtr->dimensions();
472 unsigned int size = dimensions->size();
473 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
474
475 // two statements (on purpose) for easier debugging:
476 armnn::TensorInfo result(size,
477 outputDims.data(),
478 type,
479 quantizationScale,
480 quantizationOffset);
481 return result;
482}
483
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000484armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000485{
486 CHECK_CONST_TENSOR_PTR(constTensorPtr);
487 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
488
489 switch (constTensorPtr->data_type())
490 {
491 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000492 {
493 auto byteData = constTensorPtr->data_as_ByteData()->data();
494 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
495 return armnn::ConstTensor(tensorInfo, byteData->data());
496 }
Mike Kellya0766c32019-02-19 17:22:07 +0000497 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000498 {
499 auto shortData = constTensorPtr->data_as_ShortData()->data();
500 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
501 return armnn::ConstTensor(tensorInfo, shortData->data());
502 }
Mike Kellya0766c32019-02-19 17:22:07 +0000503 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000504 {
505 auto intData = constTensorPtr->data_as_IntData()->data();
506 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
507 return armnn::ConstTensor(tensorInfo, intData->data());
508 }
Mike Kellya0766c32019-02-19 17:22:07 +0000509 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000510 {
511 auto longData = constTensorPtr->data_as_LongData()->data();
512 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
513 return armnn::ConstTensor(tensorInfo, longData->data());
514 }
Mike Kellya0766c32019-02-19 17:22:07 +0000515 default:
516 {
517 CheckLocation location = CHECK_LOCATION();
518 throw ParseException(
519 boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
520 constTensorPtr->data_type() %
521 EnumNameConstTensorData(constTensorPtr->data_type()) %
522 location.AsString()));
523 }
524 }
525}
526
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000527Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000528 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000529{
530 CHECK_LAYERS(graphPtr, 0, layerIndex);
531 auto layer = GetBaseLayer(graphPtr, layerIndex);
532 const auto& numInputs = layer->inputSlots()->size();
533
534 TensorRawPtrVector result(numInputs);
535
536 for (unsigned int i=0; i<numInputs; ++i)
537 {
538 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
539 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
540 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
541 }
542 return result;
543}
544
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000545Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000546 unsigned int layerIndex)
547{
548 CHECK_LAYERS(graphPtr, 0, layerIndex);
549 auto layer = GetBaseLayer(graphPtr, layerIndex);
550 const auto& numOutputs = layer->outputSlots()->size();
551
552 TensorRawPtrVector result(numOutputs);
553
554 for (unsigned int i=0; i<numOutputs; ++i)
555 {
556 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
557 }
558 return result;
559}
560
Derek Lamberti8ddae332019-02-21 16:29:43 +0000561void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000562{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000563 CHECK_LAYERS(graph, 0, layerIndex);
564 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000565 throw ParseException(
566 boost::str(
567 boost::format("Layer not supported. "
568 "layerIndex: %1% "
569 "layerName: %2% / %3%") %
570 layerIndex %
571 layerName %
572 CHECK_LOCATION().AsString()));
573}
574
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000575void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000576{
577 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000578 m_InputBindings.clear();
579 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000580}
581
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000582IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000583{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000584 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000585}
586
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000587IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000588{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000589 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000590}
591
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000592void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000593{
594 delete parser;
595}
596
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000597INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000598{
599 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000600 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
601 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000602}
603
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000604armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000605{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000606 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000607 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
608 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
609 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000610}
611
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000612Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000613{
614 if (binaryContent == nullptr)
615 {
616 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
617 CHECK_LOCATION().AsString()));
618 }
619 flatbuffers::Verifier verifier(binaryContent, len);
620 if (verifier.VerifyBuffer<SerializedGraph>() == false)
621 {
622 throw ParseException(
623 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
624 "flatbuffers format. size:%1% %2%") %
625 len %
626 CHECK_LOCATION().AsString()));
627 }
628 return GetSerializedGraph(binaryContent);
629}
630
/// Translates a verified SerializedGraph into an armnn::INetwork.
/// Phase 1: run the registered parser for every non-input/output layer
/// (parsers record slot info in m_GraphConnections as a side effect).
/// Phase 2: create the input/output layers and their bindings.
/// Phase 3: wire every recorded output slot to its consumer input slots.
/// Ownership of the built network is transferred to the caller.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/output layers are handled separately by SetupInput/OutputLayers below.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot with no recorded consumers is simply left unconnected.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand the finished network to the caller; m_Network is left empty.
    return std::move(m_Network);
}
671
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000672BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000673 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000674{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000675 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000676 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000677 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000678 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000679 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000680 }
681 }
682 throw ParseException(
683 boost::str(
684 boost::format("No input binding found for layer:%1% / %2%") %
685 name %
686 CHECK_LOCATION().AsString()));
687}
688
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000689BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000690 const std::string& name) const
691{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000692 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000693 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000694 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000695 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000696 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000697 }
698 }
699 throw ParseException(
700 boost::str(
701 boost::format("No output binding found for layer:%1% / %2%") %
702 name %
703 CHECK_LOCATION().AsString()));
704}
705
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100706unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
707{
708 for (unsigned int i = 0; i < graph->layers()->size(); i++)
709 {
710 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
711 if (layer->index() == targetIndex)
712 {
713 return i;
714 }
715 }
716 throw ParseException("Layer with given index not found");
717}
718
/// Creates an ArmNN InputLayer for every input id listed in the serialized
/// graph, records its binding in m_InputBindings (keyed by layer name) and
/// registers its output slots for the later connection pass.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        // inputIds holds layer 'index' properties; translate to vector positions.
        const unsigned int inputId = graph->inputIds()->Get(i);
        const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // The input layer's single output carries the tensor described in the graph.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
747
Derek Lamberti8ddae332019-02-21 16:29:43 +0000748void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000749{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000750 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100751 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000752 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100753 m_OutputBindings.reserve(numOutputs);
754
755 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000756 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100757 const unsigned int outputId = graph->outputIds()->Get(i);
758 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
759 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000760
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100761 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
762 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
763 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000764
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100765 IConnectableLayer* outputLayer =
766 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000767
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100768 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
769
770 unsigned int sourceLayerIndex =
771 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
772 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
773 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
774
Derek Lamberti8ddae332019-02-21 16:29:43 +0000775 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100776 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000777 }
778}
779
/// Records each of 'layer's output slots in m_GraphConnections, keyed by the
/// slot index stored in the serialized layer, so the connection pass can later
/// attach consumers.
/// @throws ParseException if the serialized slot count differs from the ArmNN
///         layer's actual output slot count.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        // Use the slot index recorded in the flatbuffer, not the loop counter.
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
806
/// Records each of 'layer's input slots in m_GraphConnections under the source
/// layer/slot named by the serialized connection, so the connection pass can
/// later wire producers to this layer.
/// @throws ParseException if the serialized slot count differs from the ArmNN
///         layer's actual input slot count.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Each serialized input slot names the producing layer and output slot.
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}
833
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000834void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
835 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100836 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000837{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100838 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000839 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100840 m_GraphConnections[sourceLayerIndex] = Connections();
841 }
842
843 Connections& connections = m_GraphConnections[sourceLayerIndex];
844 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
845 {
846 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000847 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000848 else
849 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100850 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000851 }
852}
Kevin May43a799c2019-02-08 16:31:42 +0000853
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000854void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100855 uint32_t outputSlotIndex,
856 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000857{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100858 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
859 {
860 m_GraphConnections[sourceLayerIndex] = Connections();
861 }
862
863 Connections& connections = m_GraphConnections[sourceLayerIndex];
864 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
865 {
866 throw ParseException("Same output slot index processed twice");
867 }
868
869 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000870}
871
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100872void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
873{
874 CHECK_LAYERS(graph, 0, layerIndex);
875 auto inputs = GetInputs(graph, layerIndex);
876 CHECK_LOCATION();
877 CHECK_VALID_SIZE(inputs.size(), 1);
878
879 auto outputs = GetOutputs(graph, layerIndex);
880 CHECK_VALID_SIZE(outputs.size(), 1);
881
882 auto layerName = GetLayerName(graph, layerIndex);
883
884 IConnectableLayer* layer = m_Network->AddAbsLayer(layerName.c_str());
885 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
886 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
887
888 RegisterInputSlots(graph, layerIndex, layer);
889 RegisterOutputSlots(graph, layerIndex, layer);
890}
891
/// Deserializes an Activation layer: reads the activation function and its
/// a/b parameters from the flatbuffer descriptor, adds the equivalent ArmNN
/// layer and wires up its slots.
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    // a/b are the function-dependent parameters of ActivationDescriptor
    // (their meaning varies with m_Function).
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
919
Derek Lamberti8ddae332019-02-21 16:29:43 +0000920void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000921{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000922 CHECK_LAYERS(graph, 0, layerIndex);
923 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000924 CHECK_LOCATION();
925 CHECK_VALID_SIZE(inputs.size(), 2);
926
Derek Lamberti8ddae332019-02-21 16:29:43 +0000927 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000928 CHECK_VALID_SIZE(outputs.size(), 1);
929
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000930 auto layerName = GetLayerName(graph, layerIndex);
931 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000932
933 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
934 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
935
Derek Lamberti8ddae332019-02-21 16:29:43 +0000936 RegisterInputSlots(graph, layerIndex, layer);
937 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000938}
939
/// Deserializes an ArgMinMax layer: reads the min/max function and reduction
/// axis from the flatbuffer descriptor and adds the equivalent ArmNN layer.
void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ArgMinMaxDescriptor descriptor;
    descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->function());
    descriptor.m_Axis = serializerDescriptor->axis();
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
965
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000966void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
967{
968 CHECK_LAYERS(graph, 0, layerIndex);
969
970 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
971 CHECK_VALID_SIZE(inputs.size(), 1);
972
973 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
974 CHECK_VALID_SIZE(outputs.size(), 1);
975
976 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
977 auto flatBufferCrops = flatBufferDescriptor->crops();
978 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
979
980 if (flatBufferCrops->Length() % 2 != 0)
981 {
982 throw ParseException(boost::str(
983 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
984 }
985
986 std::vector<std::pair<unsigned int, unsigned int>> crops;
987 crops.reserve(flatBufferCrops->Length() / 2);
988 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
989 {
990 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
991 }
992
993 armnn::BatchToSpaceNdDescriptor descriptor;
994 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
995 descriptor.m_BlockShape =
996 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
997 descriptor.m_Crops = crops;
998
999 auto layerName = GetLayerName(graph, layerIndex);
1000 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1001
1002 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1003 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1004
1005 RegisterInputSlots(graph, layerIndex, layer);
1006 RegisterOutputSlots(graph, layerIndex, layer);
1007}
1008
/// Deserializes a BatchNormalization layer: reads epsilon and data layout from
/// the descriptor, plus the four constant tensors (mean, variance, beta,
/// gamma) stored alongside it.
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    // eps guards against division by zero during normalization.
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1045
/// Deserializes a Constant layer. Constant layers have no inputs, so only the
/// output slots are registered.
void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    // The constant's data is embedded in the flatbuffer as a tensor payload.
    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // No RegisterInputSlots: a constant layer produces data only.
    RegisterOutputSlots(graph, layerIndex, layer);
}
1068
Derek Lamberti8ddae332019-02-21 16:29:43 +00001069void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001070{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001071 CHECK_LAYERS(graph, 0, layerIndex);
1072 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001073 CHECK_LOCATION();
1074 CHECK_VALID_SIZE(inputs.size(), 1);
1075
Derek Lamberti8ddae332019-02-21 16:29:43 +00001076 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001077 CHECK_VALID_SIZE(outputs.size(), 1);
1078
Derek Lamberti8ddae332019-02-21 16:29:43 +00001079 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001080 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001081 auto serializerDescriptor = serializerLayer->descriptor();
1082
1083 armnn::Convolution2dDescriptor descriptor;
1084 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1085 descriptor.m_PadRight = serializerDescriptor->padRight();
1086 descriptor.m_PadTop = serializerDescriptor->padTop();
1087 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1088 descriptor.m_StrideX = serializerDescriptor->strideX();
1089 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001090 descriptor.m_DilationX = serializerDescriptor->dilationX();
1091 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001092 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1093 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1094
1095 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1096 armnn::ConstTensor biases;
1097
Matteo Martincighfc598e12019-05-14 10:36:13 +01001098 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001099 if (descriptor.m_BiasEnabled)
1100 {
1101 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001102 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001103 }
1104 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1105 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001106 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001107 layerName.c_str());
1108 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1109 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1110
Derek Lamberti8ddae332019-02-21 16:29:43 +00001111 RegisterInputSlots(graph, layerIndex, layer);
1112 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001113}
1114
Derek Lamberti8ddae332019-02-21 16:29:43 +00001115void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001116{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001117 CHECK_LAYERS(graph, 0, layerIndex);
1118 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001119 CHECK_LOCATION();
1120 CHECK_VALID_SIZE(inputs.size(), 1);
1121
Derek Lamberti8ddae332019-02-21 16:29:43 +00001122 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001123 CHECK_VALID_SIZE(outputs.size(), 1);
1124
Derek Lamberti8ddae332019-02-21 16:29:43 +00001125 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001126 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001127 auto serializerDescriptor = serializerLayer->descriptor();
1128
1129 armnn::DepthwiseConvolution2dDescriptor descriptor;
1130 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1131 descriptor.m_PadRight = serializerDescriptor->padRight();
1132 descriptor.m_PadTop = serializerDescriptor->padTop();
1133 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1134 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001135 descriptor.m_StrideY = serializerDescriptor->strideY();
1136 descriptor.m_DilationX = serializerDescriptor->dilationX();
1137 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001138 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1139 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1140
1141 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1142 armnn::ConstTensor biases;
1143
Matteo Martincighfc598e12019-05-14 10:36:13 +01001144 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001145 if (descriptor.m_BiasEnabled)
1146 {
1147 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001148 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001149 }
1150 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1151 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001152 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001153 layerName.c_str());
1154
1155 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1156 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1157
Derek Lamberti8ddae332019-02-21 16:29:43 +00001158 RegisterInputSlots(graph, layerIndex, layer);
1159 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001160}
1161
/// Deserializes a DetectionPostProcess layer (SSD-style NMS post-processing):
/// reads the NMS thresholds/limits and box scale factors from the descriptor
/// plus the constant anchors tensor. The layer has 2 inputs (box encodings,
/// class scores) and 4 outputs (boxes, classes, scores, num detections).
void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Set the tensor info for each of the four outputs.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1203
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001204void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1205{
1206 CHECK_LAYERS(graph, 0, layerIndex);
1207 auto inputs = GetInputs(graph, layerIndex);
1208 CHECK_LOCATION();
1209 CHECK_VALID_SIZE(inputs.size(), 2);
1210
1211 auto outputs = GetOutputs(graph, layerIndex);
1212 CHECK_VALID_SIZE(outputs.size(), 1);
1213
1214 auto layerName = GetLayerName(graph, layerIndex);
1215 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1216
1217 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1218 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1219
1220 RegisterInputSlots(graph, layerIndex, layer);
1221 RegisterOutputSlots(graph, layerIndex, layer);
1222}
1223
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001224void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1225{
1226 CHECK_LAYERS(graph, 0, layerIndex);
1227 auto inputs = GetInputs(graph, layerIndex);
1228 CHECK_LOCATION();
1229 CHECK_VALID_SIZE(inputs.size(), 2);
1230
1231 auto outputs = GetOutputs(graph, layerIndex);
1232 CHECK_VALID_SIZE(outputs.size(), 1);
1233
1234 auto layerName = GetLayerName(graph, layerIndex);
1235 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1236
1237 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1238 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1239
1240 RegisterInputSlots(graph, layerIndex, layer);
1241 RegisterOutputSlots(graph, layerIndex, layer);
1242}
1243
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001244void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1245{
1246 CHECK_LAYERS(graph, 0, layerIndex);
1247 auto inputs = GetInputs(graph, layerIndex);
1248 CHECK_LOCATION();
1249 CHECK_VALID_SIZE(inputs.size(), 2);
1250
1251 auto outputs = GetOutputs(graph, layerIndex);
1252 CHECK_VALID_SIZE(outputs.size(), 1);
1253
1254 auto layerName = GetLayerName(graph, layerIndex);
1255 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1256
1257 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1258 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1259
1260 RegisterInputSlots(graph, layerIndex, layer);
1261 RegisterOutputSlots(graph, layerIndex, layer);
1262}
1263
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001264void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1265{
1266 CHECK_LAYERS(graph, 0, layerIndex);
1267
1268 auto inputs = GetInputs(graph, layerIndex);
1269 CHECK_VALID_SIZE(inputs.size(), 1);
1270
1271 auto outputs = GetOutputs(graph, layerIndex);
1272 CHECK_VALID_SIZE(outputs.size(), 1);
1273 auto outputInfo = ToTensorInfo(outputs[0]);
1274
1275 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1276 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1277
1278 auto layerName = GetLayerName(graph, layerIndex);
1279 armnn::L2NormalizationDescriptor descriptor;
1280 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001281 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001282
1283 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1284 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1285
1286 RegisterInputSlots(graph, layerIndex, layer);
1287 RegisterOutputSlots(graph, layerIndex, layer);
1288}
1289
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001290void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1291{
1292 CHECK_LAYERS(graph, 0, layerIndex);
1293 auto inputs = GetInputs(graph, layerIndex);
1294 CHECK_LOCATION();
1295 CHECK_VALID_SIZE(inputs.size(), 2);
1296
1297 auto outputs = GetOutputs(graph, layerIndex);
1298 CHECK_VALID_SIZE(outputs.size(), 1);
1299
1300 auto layerName = GetLayerName(graph, layerIndex);
1301 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1302
1303 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1304 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1305
1306 RegisterInputSlots(graph, layerIndex, layer);
1307 RegisterOutputSlots(graph, layerIndex, layer);
1308}
1309
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001310void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1311{
1312 CHECK_LAYERS(graph, 0, layerIndex);
1313 auto inputs = GetInputs(graph, layerIndex);
1314 CHECK_LOCATION();
1315 CHECK_VALID_SIZE(inputs.size(), 2);
1316
1317 auto outputs = GetOutputs(graph, layerIndex);
1318 CHECK_VALID_SIZE(outputs.size(), 1);
1319
1320 auto layerName = GetLayerName(graph, layerIndex);
1321 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1322
1323 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1324 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1325
1326 RegisterInputSlots(graph, layerIndex, layer);
1327 RegisterOutputSlots(graph, layerIndex, layer);
1328}
1329
Jim Flynne242f2d2019-05-22 14:24:13 +01001330const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1331 unsigned int layerIndex)
1332{
1333 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1334
1335 switch (layerType)
1336 {
1337 case Layer::Layer_ConcatLayer:
1338 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1339 case Layer::Layer_MergerLayer:
1340 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1341 default:
1342 throw armnn::Exception("unknown layer type, should be concat or merger");
1343 }
1344}
1345
Jim Flynn906f9462019-05-10 13:55:21 +01001346void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001347{
1348 CHECK_LAYERS(graph, 0, layerIndex);
1349 CHECK_LOCATION();
1350
1351 auto outputs = GetOutputs(graph, layerIndex);
1352 CHECK_VALID_SIZE(outputs.size(), 1);
1353
Jim Flynnac25a1b2019-02-28 10:40:49 +00001354 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001355 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1356 unsigned int numViews = originsDescriptor->numViews();
1357 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001358
1359 // can now check the number of inputs == number of views
1360 auto inputs = GetInputs(graph, layerIndex);
1361 CHECK_VALID_SIZE(inputs.size(), numViews);
1362
1363 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001364 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001365 for (unsigned int v = 0; v < numViews; ++v)
1366 {
1367 auto originPtr = originsPtr->Get(v);
1368 for (unsigned int d = 0; d < numDimensions; ++d)
1369 {
1370 uint32_t value = originPtr->data()->Get(d);
1371 descriptor.SetViewOriginCoord(v, d, value);
1372 }
1373 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001374 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001375
Jim Flynn906f9462019-05-10 13:55:21 +01001376 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001377 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1378 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1379
1380 RegisterInputSlots(graph, layerIndex, layer);
1381 RegisterOutputSlots(graph, layerIndex, layer);
1382}
1383
Derek Lamberti8ddae332019-02-21 16:29:43 +00001384void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001385{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001386 CHECK_LAYERS(graph, 0, layerIndex);
1387 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001388 CHECK_LOCATION();
1389 CHECK_VALID_SIZE(inputs.size(), 2);
1390
Derek Lamberti8ddae332019-02-21 16:29:43 +00001391 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001392 CHECK_VALID_SIZE(outputs.size(), 1);
1393
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001394 auto layerName = GetLayerName(graph, layerIndex);
1395 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001396
1397 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1398 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1399
Derek Lamberti8ddae332019-02-21 16:29:43 +00001400 RegisterInputSlots(graph, layerIndex, layer);
1401 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001402}
1403
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001404void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1405{
1406 CHECK_LAYERS(graph, 0, layerIndex);
1407 CHECK_LOCATION();
1408
1409 auto inputs = GetInputs(graph, layerIndex);
1410 CHECK_VALID_SIZE(inputs.size(), 1);
1411
1412 auto outputs = GetOutputs(graph, layerIndex);
1413 CHECK_VALID_SIZE(outputs.size(), 1);
1414
1415 auto layerName = GetLayerName(graph, layerIndex);
1416
1417 armnn::IConnectableLayer* layer;
1418
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001419 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001420
1421 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1422 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1423
1424 RegisterInputSlots(graph, layerIndex, layer);
1425 RegisterOutputSlots(graph, layerIndex, layer);
1426}
1427
Derek Lamberti8ddae332019-02-21 16:29:43 +00001428void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001429{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001430 CHECK_LAYERS(graph, 0, layerIndex);
1431 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001432 CHECK_LOCATION();
1433 CHECK_VALID_SIZE(inputs.size(), 1);
1434
Derek Lamberti8ddae332019-02-21 16:29:43 +00001435 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001436 CHECK_VALID_SIZE(outputs.size(), 1);
1437
Derek Lamberti8ddae332019-02-21 16:29:43 +00001438 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001439 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001440 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1441
1442 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1443 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1444 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1445
1446 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1447
1448 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001449 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001450 if (flatBufferDescriptor->biasEnabled())
1451 {
1452 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001453 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001454 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001455 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1456 weightsTensor,
1457 optionalBiases,
1458 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001459
1460 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1461 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1462
Derek Lamberti8ddae332019-02-21 16:29:43 +00001463 RegisterInputSlots(graph, layerIndex, layer);
1464 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001465}
1466
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001467void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1468{
1469 CHECK_LAYERS(graph, 0, layerIndex);
1470
1471 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1472 CHECK_VALID_SIZE(inputs.size(), 1);
1473
1474 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1475 CHECK_VALID_SIZE(outputs.size(), 1);
1476
1477 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1478 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001479 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001480
1481 if (flatBufferPadList->Length() % 2 != 0)
1482 {
1483 throw ParseException(boost::str(
1484 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1485 }
1486
1487 std::vector<std::pair<unsigned int, unsigned int>> padList;
1488 padList.reserve(flatBufferPadList->Length() / 2);
1489 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1490 {
1491 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1492 }
1493
David Monahan34757812019-06-19 11:47:21 +01001494 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001495
1496 auto layerName = GetLayerName(graph, layerIndex);
1497 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1498
1499 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1500 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1501
1502 RegisterInputSlots(graph, layerIndex, layer);
1503 RegisterOutputSlots(graph, layerIndex, layer);
1504}
1505
Derek Lamberti8ddae332019-02-21 16:29:43 +00001506void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001507{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001508 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001509
1510 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001511 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001512
Derek Lamberti8ddae332019-02-21 16:29:43 +00001513 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001514 CHECK_VALID_SIZE(inputs.size(), 1);
1515
Derek Lamberti8ddae332019-02-21 16:29:43 +00001516 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001517 CHECK_VALID_SIZE(outputs.size(), 1);
1518 auto outputInfo = ToTensorInfo(outputs[0]);
1519
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001520 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001521 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1522
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001523 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001524 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1525
Derek Lamberti8ddae332019-02-21 16:29:43 +00001526 RegisterInputSlots(graph, layerIndex, layer);
1527 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001528}
1529
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001530armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001531 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001532{
1533 armnn::Pooling2dDescriptor desc;
1534
1535 switch (pooling2dDesc->poolType())
1536 {
1537 case PoolingAlgorithm_Average:
1538 {
1539 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001540 break;
1541 }
1542 case PoolingAlgorithm_Max:
1543 {
1544 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001545 break;
1546 }
1547 default:
1548 {
1549 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1550 }
1551 }
1552
1553 switch (pooling2dDesc->outputShapeRounding())
1554 {
1555 case OutputShapeRounding_Floor:
1556 {
1557 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1558 break;
1559 }
1560 case OutputShapeRounding_Ceiling:
1561 {
1562 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1563 break;
1564 }
1565 default:
1566 {
1567 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1568 }
1569 }
1570
1571 switch (pooling2dDesc->paddingMethod())
1572 {
1573 case PaddingMethod_Exclude:
1574 {
1575 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1576 break;
1577 }
1578 case PaddingMethod_IgnoreValue:
1579 {
1580 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1581 break;
1582 }
1583 default:
1584 {
1585 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1586 }
1587 }
1588
1589 switch (pooling2dDesc->dataLayout())
1590 {
1591 case DataLayout_NCHW:
1592 {
1593 desc.m_DataLayout = armnn::DataLayout::NCHW;
1594 break;
1595 }
1596 case DataLayout_NHWC:
1597 {
1598 desc.m_DataLayout = armnn::DataLayout::NHWC;
1599 break;
1600 }
1601 default:
1602 {
1603 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1604 }
1605 }
1606
1607 desc.m_PadRight = pooling2dDesc->padRight();
1608 desc.m_PadLeft = pooling2dDesc->padLeft();
1609 desc.m_PadBottom = pooling2dDesc->padBottom();
1610 desc.m_PadTop = pooling2dDesc->padTop();
1611 desc.m_StrideX = pooling2dDesc->strideX();
1612 desc.m_StrideY = pooling2dDesc->strideY();
1613 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1614 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1615
1616 return desc;
1617}
1618
Derek Lamberti8ddae332019-02-21 16:29:43 +00001619void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001620{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001621 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001622
Derek Lamberti8ddae332019-02-21 16:29:43 +00001623 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001624 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001625 CHECK_VALID_SIZE(inputs.size(), 1);
1626
Derek Lamberti8ddae332019-02-21 16:29:43 +00001627 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001628 CHECK_VALID_SIZE(outputs.size(), 1);
1629 auto outputInfo = ToTensorInfo(outputs[0]);
1630
1631 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001632 auto layerName = GetLayerName(graph, layerIndex);
1633 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001634 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1635
Derek Lamberti8ddae332019-02-21 16:29:43 +00001636 RegisterInputSlots(graph, layerIndex, layer);
1637 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001638}
1639
Derek Lamberti87acb272019-03-27 16:51:31 +00001640void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1641{
1642 CHECK_LAYERS(graph, 0, layerIndex);
1643
1644 auto inputs = GetInputs(graph, layerIndex);
1645 CHECK_VALID_SIZE(inputs.size(), 1);
1646
1647 auto outputs = GetOutputs(graph, layerIndex);
1648 CHECK_VALID_SIZE(outputs.size(), 1);
1649 auto outputInfo = ToTensorInfo(outputs[0]);
1650
1651 auto layerName = GetLayerName(graph, layerIndex);
1652 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1653 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1654
1655 RegisterInputSlots(graph, layerIndex, layer);
1656 RegisterOutputSlots(graph, layerIndex, layer);
1657}
1658
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001659armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001660 const std::vector<uint32_t>& targetDimsIn)
1661{
1662 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1663 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1664
1665 if (stretchDim != targetDimsIn.end())
1666 {
1667 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1668 {
1669 throw ParseException(boost::str(
1670 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1671 }
1672
1673 auto targetNumElements =
1674 boost::numeric_cast<unsigned int>(
1675 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1676
1677 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1678 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1679 }
1680
1681 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1682
1683 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1684 reshapeInfo.SetShape(outputShape);
1685
1686 return reshapeInfo;
1687}
1688
Derek Lamberti8ddae332019-02-21 16:29:43 +00001689void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001690{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001691 CHECK_LAYERS(graph, 0, layerIndex);
1692 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001693
Derek Lamberti8ddae332019-02-21 16:29:43 +00001694 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001695 CHECK_VALID_SIZE(outputs.size(), 1);
1696
1697 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1698 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1699
Derek Lamberti8ddae332019-02-21 16:29:43 +00001700 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001701 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1702
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001703 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001704 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1705
1706 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1707 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
1708
1709 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
1710 {
1711 std::stringstream ss;
1712 ss << "New shape defined in reshape parameters "
1713 << reshapeOutputTensorShape
1714 << " does not equal output shape "
1715 << actualOutputTensorInfo.GetShape()
1716 << ": "
1717 << CHECK_LOCATION().AsString();
1718 throw ParseException(ss.str());
1719 }
1720
1721 armnn::ReshapeDescriptor reshapeDesc;
1722 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
1723
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001724 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001725 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
1726 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
1727
Derek Lamberti8ddae332019-02-21 16:29:43 +00001728 RegisterInputSlots(graph, layerIndex, layer);
1729 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001730}
1731
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001732void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1733{
1734 CHECK_LAYERS(graph, 0, layerIndex);
1735
1736 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1737 CHECK_VALID_SIZE(inputs.size(), 1);
1738
1739 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1740 CHECK_VALID_SIZE(outputs.size(), 1);
1741
1742 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1743
1744 armnn::ResizeDescriptor descriptor;
1745 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1746 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1747 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1748 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1749
1750 auto layerName = GetLayerName(graph, layerIndex);
1751 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1752
1753 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1754 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1755
1756 RegisterInputSlots(graph, layerIndex, layer);
1757 RegisterOutputSlots(graph, layerIndex, layer);
1758}
1759
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001760void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1761{
1762 CHECK_LAYERS(graph, 0, layerIndex);
1763
1764 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1765 CHECK_VALID_SIZE(inputs.size(), 1);
1766
1767 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1768 CHECK_VALID_SIZE(outputs.size(), 1);
1769
1770 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1771
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001772 armnn::ResizeDescriptor descriptor;
1773 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001774 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001775 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
1776 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001777
1778 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001779 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001780
1781 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1782 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1783
1784 RegisterInputSlots(graph, layerIndex, layer);
1785 RegisterOutputSlots(graph, layerIndex, layer);
1786}
1787
Derek Lamberti8ddae332019-02-21 16:29:43 +00001788void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001789{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001790 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001791
Derek Lamberti8ddae332019-02-21 16:29:43 +00001792 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001793 CHECK_VALID_SIZE(inputs.size(), 1);
1794
Derek Lamberti8ddae332019-02-21 16:29:43 +00001795 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001796 CHECK_VALID_SIZE(outputs.size(), 1);
1797
1798 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001799 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001800 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001801
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001802 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1803
1804 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1805 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1806
Derek Lamberti8ddae332019-02-21 16:29:43 +00001807 RegisterInputSlots(graph, layerIndex, layer);
1808 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001809}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001810
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001811void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1812{
1813 CHECK_LAYERS(graph, 0, layerIndex);
1814
1815 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1816 CHECK_VALID_SIZE(inputs.size(), 1);
1817
1818 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1819 CHECK_VALID_SIZE(outputs.size(), 1);
1820
1821 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1822 auto flatBufferPadList = flatBufferDescriptor->padList();
1823 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1824
1825 if (flatBufferPadList->Length() % 2 != 0)
1826 {
1827 throw ParseException(boost::str(
1828 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1829 }
1830
1831 std::vector<std::pair<unsigned int, unsigned int>> padList;
1832 padList.reserve(flatBufferPadList->Length() / 2);
1833 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1834 {
1835 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1836 }
1837
1838 armnn::SpaceToBatchNdDescriptor descriptor;
1839 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1840 descriptor.m_BlockShape =
1841 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1842 descriptor.m_PadList = padList;
1843
1844 auto layerName = GetLayerName(graph, layerIndex);
1845 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1846
1847 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1848 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1849
1850 RegisterInputSlots(graph, layerIndex, layer);
1851 RegisterOutputSlots(graph, layerIndex, layer);
1852}
1853
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001854void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1855{
1856 CHECK_LAYERS(graph, 0, layerIndex);
1857
1858 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1859 CHECK_VALID_SIZE(inputs.size(), 1);
1860
1861 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1862 CHECK_VALID_SIZE(outputs.size(), 1);
1863
1864 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1865
1866 armnn::SpaceToDepthDescriptor descriptor;
1867 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1868 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1869
1870 auto layerName = GetLayerName(graph, layerIndex);
1871 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1872
1873 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1874 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1875
1876 RegisterInputSlots(graph, layerIndex, layer);
1877 RegisterOutputSlots(graph, layerIndex, layer);
1878}
1879
Nina Drozd57728782019-02-27 10:53:27 +00001880armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1881 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1882 unsigned int layerIndex)
1883{
1884 armnn::NormalizationDescriptor desc;
1885
1886 switch (normalizationDescriptor->normChannelType())
1887 {
1888 case NormalizationAlgorithmChannel_Across:
1889 {
1890 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1891 break;
1892 }
1893 case NormalizationAlgorithmChannel_Within:
1894 {
1895 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1896 break;
1897 }
1898 default:
1899 {
1900 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1901 }
1902 }
1903
1904 switch (normalizationDescriptor->normMethodType())
1905 {
1906 case NormalizationAlgorithmMethod_LocalBrightness:
1907 {
1908 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1909 break;
1910 }
1911 case NormalizationAlgorithmMethod_LocalContrast:
1912 {
1913 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1914 break;
1915 }
1916 default:
1917 {
1918 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1919 }
1920 }
1921
1922 switch (normalizationDescriptor->dataLayout())
1923 {
1924 case DataLayout_NCHW:
1925 {
1926 desc.m_DataLayout = armnn::DataLayout::NCHW;
1927 break;
1928 }
1929 case DataLayout_NHWC:
1930 {
1931 desc.m_DataLayout = armnn::DataLayout::NHWC;
1932 break;
1933 }
1934 default:
1935 {
1936 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1937 }
1938 }
1939
1940 desc.m_Alpha = normalizationDescriptor->alpha();
1941 desc.m_Beta = normalizationDescriptor->beta();
1942 desc.m_K = normalizationDescriptor->k();
1943 desc.m_NormSize = normalizationDescriptor->normSize();
1944
1945 return desc;
1946}
1947
1948void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1949{
1950 CHECK_LAYERS(graph, 0, layerIndex);
1951
1952 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1953
1954 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1955 CHECK_VALID_SIZE(inputs.size(), 1);
1956
1957 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1958 CHECK_VALID_SIZE(outputs.size(), 1);
1959
1960 auto outputInfo = ToTensorInfo(outputs[0]);
1961
1962 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1963 auto layerName = GetLayerName(graph, layerIndex);
1964
1965 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1966 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1967
1968 RegisterInputSlots(graph, layerIndex, layer);
1969 RegisterOutputSlots(graph, layerIndex, layer);
1970}
1971
Sadik Armagan8b42a382019-03-01 14:24:49 +00001972void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1973{
1974 CHECK_LAYERS(graph, 0, layerIndex);
1975 auto inputs = GetInputs(graph, layerIndex);
1976 CHECK_LOCATION();
1977 CHECK_VALID_SIZE(inputs.size(), 1);
1978
1979 auto outputs = GetOutputs(graph, layerIndex);
1980 CHECK_VALID_SIZE(outputs.size(), 1);
1981
1982 auto layerName = GetLayerName(graph, layerIndex);
1983 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1984
1985 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1986 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1987
1988 RegisterInputSlots(graph, layerIndex, layer);
1989 RegisterOutputSlots(graph, layerIndex, layer);
1990}
1991
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001992void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1993{
1994 CHECK_LAYERS(graph, 0, layerIndex);
1995
1996 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1997 CHECK_VALID_SIZE(inputs.size(), 1);
1998
1999 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2000 CHECK_VALID_SIZE(outputs.size(), 1);
2001
2002 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2003
2004 auto flatBufferBegin = flatBufferDescriptor->begin();
2005 auto flatBufferEnd = flatBufferDescriptor->end();
2006 auto flatBufferStride = flatBufferDescriptor->stride();
2007
2008 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2009 flatBufferBegin->Length() == flatBufferStride->Length()))
2010 {
2011 throw ParseException(boost::str(
2012 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2013 }
2014
2015 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2016 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2017 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2018
2019 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2020 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2021 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2022 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2023 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2024 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2025 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2026
2027 auto layerName = GetLayerName(graph, layerIndex);
2028 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2029
2030 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2031 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2032
2033 RegisterInputSlots(graph, layerIndex, layer);
2034 RegisterOutputSlots(graph, layerIndex, layer);
2035}
2036
Conor Kennedyda1f9752019-03-01 14:37:12 +00002037void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2038{
2039 CHECK_LAYERS(graph, 0, layerIndex);
2040 auto inputs = GetInputs(graph, layerIndex);
2041 CHECK_LOCATION();
2042 CHECK_VALID_SIZE(inputs.size(), 2);
2043
2044 auto outputs = GetOutputs(graph, layerIndex);
2045 CHECK_VALID_SIZE(outputs.size(), 1);
2046
2047 auto layerName = GetLayerName(graph, layerIndex);
2048 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2049
2050 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2051 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2052
2053 RegisterInputSlots(graph, layerIndex, layer);
2054 RegisterOutputSlots(graph, layerIndex, layer);
2055}
2056
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002057void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2058{
2059 CHECK_LAYERS(graph, 0, layerIndex);
2060
2061 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2062 CHECK_VALID_SIZE(inputs.size(), 2);
2063
2064 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2065 CHECK_VALID_SIZE(outputs.size(), 1);
2066
2067 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002068 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2069
2070 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002071 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2072
2073 RegisterInputSlots(graph, layerIndex, layer);
2074 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002075}
2076
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002077void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2078{
2079 CHECK_LAYERS(graph, 0, layerIndex);
2080
2081 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2082 CHECK_VALID_SIZE(inputs.size(), 1);
2083
2084 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2085 CHECK_VALID_SIZE(outputs.size(), 1);
2086
2087 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2088 auto flatBufferAxis = flatBufferDescriptor->axis();
2089 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2090
2091 armnn::MeanDescriptor descriptor;
2092 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2093 descriptor.m_KeepDims = flatBufferKeepDims;
2094
2095 auto layerName = GetLayerName(graph, layerIndex);
2096 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2097
2098 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2099 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2100
2101 RegisterInputSlots(graph, layerIndex, layer);
2102 RegisterOutputSlots(graph, layerIndex, layer);
2103}
2104
Jim Flynn18ce3382019-03-08 11:08:30 +00002105void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2106{
2107 CHECK_LAYERS(graph, 0, layerIndex);
2108
2109 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2110 CHECK_VALID_SIZE(inputs.size(), 1);
2111
2112 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2113
2114 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2115 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2116 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2117 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2118 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2119 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2120
2121 // Check numViews and numDimensions corresponds to the ones already serialized ...
2122 // numViews == flatBufferViewSizes.size();
2123 // foreach: numDimensions == flatBufferViewSizes[x].size();
2124
2125 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2126 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2127 {
2128 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2129 {
2130 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2131 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2132 }
2133 }
2134
2135 auto layerName = GetLayerName(graph, layerIndex);
2136 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2137
2138 // I could have as many outputs as views ...
2139 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2140 {
2141 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2142 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2143 }
2144
2145 RegisterInputSlots(graph, layerIndex, layer);
2146 RegisterOutputSlots(graph, layerIndex, layer);
2147}
2148
Jim Flynn11af3752019-03-19 17:22:29 +00002149armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2150{
2151 armnn::LstmDescriptor desc;
2152
2153 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2154 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2155 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2156 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2157 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2158 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002159 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002160
2161 return desc;
2162}
2163
// Deserializes an LSTM layer: 3 inputs and 4 outputs, plus a set of weight
// and bias tensors whose presence depends on the CIFG / projection /
// peephole / layer-norm flags in the descriptor.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // NOTE(review): lstmInputParams holds raw pointers into the local
    // ConstTensors declared below; this assumes AddLstmLayer() copies the
    // tensor data before this function returns -- confirm against INetwork.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory weights/biases: present regardless of the optional features.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate tensors are only serialized when CIFG is disabled.
    // These locals must stay in scope until AddLstmLayer() is called.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection tensors, only when the projection feature is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole tensors, only when the peephole feature is enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights; the input-gate set additionally requires
    // CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Propagate tensor info to all four output slots. The meaning of each
    // slot (e.g. scratch buffer vs. output/cell state) is defined by the
    // serializer -- not visible here, so slots are handled positionally.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2279
// Deserializes a QuantizedLstm layer: 3 inputs, 2 outputs, and a full set of
// mandatory weights/biases (no optional features, unlike the float LSTM).
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    // NOTE(review): lstmInputParams holds raw pointers into the local
    // ConstTensors below; assumes AddQuantizedLstmLayer() copies the tensor
    // data before this function returns -- confirm against INetwork.
    armnn::QuantizedLstmInputParams lstmInputParams;

    // All twelve tensors are mandatory for the quantized LSTM.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Two output slots, handled positionally (semantics defined by the serializer).
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2333
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002334void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2335{
2336 CHECK_LAYERS(graph, 0, layerIndex);
2337
2338 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2339 CHECK_VALID_SIZE(inputs.size(), 1);
2340
2341 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2342 CHECK_VALID_SIZE(outputs.size(), 1);
2343
2344 const std::string layerName = GetLayerName(graph, layerIndex);
2345 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2346
2347 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2348 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2349
2350 RegisterInputSlots(graph, layerIndex, layer);
2351 RegisterOutputSlots(graph, layerIndex, layer);
2352}
2353
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002354void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2355{
2356 CHECK_LAYERS(graph, 0, layerIndex);
2357
2358 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2359 CHECK_VALID_SIZE(inputs.size(), 2);
2360
2361 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2362 CHECK_VALID_SIZE(outputs.size(), 1);
2363
2364 const std::string layerName = GetLayerName(graph, layerIndex);
2365 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2366
2367 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2368 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2369
2370 RegisterInputSlots(graph, layerIndex, layer);
2371 RegisterOutputSlots(graph, layerIndex, layer);
2372}
2373
Sadik Armaganeff363d2019-04-05 15:25:46 +01002374void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2375{
2376 CHECK_LAYERS(graph, 0, layerIndex);
2377 auto inputs = GetInputs(graph, layerIndex);
2378 CHECK_LOCATION();
2379 CHECK_VALID_SIZE(inputs.size(), 2);
2380
2381 auto outputs = GetOutputs(graph, layerIndex);
2382 CHECK_VALID_SIZE(outputs.size(), 2);
2383
2384 auto layerName = GetLayerName(graph, layerIndex);
2385 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2386
2387 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2388 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2389
2390 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2391 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2392
2393 RegisterInputSlots(graph, layerIndex, layer);
2394 RegisterOutputSlots(graph, layerIndex, layer);
2395}
2396
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002397void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2398{
2399 CHECK_LAYERS(graph, 0, layerIndex);
2400 auto inputs = GetInputs(graph, layerIndex);
2401 CHECK_LOCATION();
2402 CHECK_VALID_SIZE(inputs.size(), 2);
2403
2404 auto outputs = GetOutputs(graph, layerIndex);
2405 CHECK_VALID_SIZE(outputs.size(), 1);
2406
2407 auto layerName = GetLayerName(graph, layerIndex);
2408 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2409
2410 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2411 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2412
2413 RegisterInputSlots(graph, layerIndex, layer);
2414 RegisterOutputSlots(graph, layerIndex, layer);
2415}
2416
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002417void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2418{
2419 CHECK_LAYERS(graph, 0, layerIndex);
2420
2421 auto inputs = GetInputs(graph, layerIndex);
2422 CHECK_VALID_SIZE(inputs.size(), 1);
2423
2424 auto outputs = GetOutputs(graph, layerIndex);
2425 CHECK_VALID_SIZE(outputs.size(), 1);
2426
2427 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2428 auto layerName = GetLayerName(graph, layerIndex);
2429 auto serializerDescriptor = serializerLayer->descriptor();
2430
2431 armnn::TransposeConvolution2dDescriptor descriptor;
2432 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2433 descriptor.m_PadRight = serializerDescriptor->padRight();
2434 descriptor.m_PadTop = serializerDescriptor->padTop();
2435 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2436 descriptor.m_StrideX = serializerDescriptor->strideX();
2437 descriptor.m_StrideY = serializerDescriptor->strideY();;
2438 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2439 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2440
2441 // weights & biases
2442 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2443 armnn::Optional<armnn::ConstTensor> optionalBiases;
2444 if (descriptor.m_BiasEnabled)
2445 {
2446 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2447 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2448 }
2449
2450 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2451 weights,
2452 optionalBiases,
2453 layerName.c_str());
2454
2455 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2456 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2457
2458 RegisterInputSlots(graph, layerIndex, layer);
2459 RegisterOutputSlots(graph, layerIndex, layer);
2460}
2461
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002462void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2463{
2464 CHECK_LAYERS(graph, 0, layerIndex);
2465 auto inputs = GetInputs(graph, layerIndex);
2466
2467 auto outputs = GetOutputs(graph, layerIndex);
2468 CHECK_VALID_SIZE(outputs.size(), 1);
2469
2470 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2471 unsigned int axis = flatBufferDescriptor->axis();
2472 unsigned int numInputs = flatBufferDescriptor->numInputs();
2473 CHECK_VALID_SIZE(inputs.size(), numInputs);
2474
2475 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2476 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2477 flatBufferInputShape->begin() + flatBufferInputShape->size());
2478
2479 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2480 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2481
2482 for (unsigned int i=0; i<inputs.size(); ++i)
2483 {
2484 armnn::TensorShape& inputShape = ToTensorInfo(inputs[i]).GetShape();
2485 if (descriptor.m_InputShape != inputShape)
2486 {
2487 std::stringstream ss;
2488 ss << "Shape of input "
2489 << i
2490 << " "
2491 << inputShape
2492 << " does not equal defined input shape "
2493 << descriptor.m_InputShape
2494 << ": "
2495 << CHECK_LOCATION().AsString();
2496 throw ParseException(ss.str());
2497 }
2498 }
2499
2500 auto layerName = GetLayerName(graph, layerIndex);
2501 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2502
2503 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2504 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2505
2506 RegisterInputSlots(graph, layerIndex, layer);
2507 RegisterOutputSlots(graph, layerIndex, layer);
2508}
2509
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002510} // namespace armnnDeserializer