blob: ef1235745c47070e82ae53a90c989866a697caea [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
// Validates that 'graph' has an unpacked layer table, that 'layersIndex' is a
// valid index into it, and that 'layerIndex' names a layer (or is the special
// VIRTUAL_LAYER_ID used for the network's virtual input/output layers).
// Throws ParseException describing the call site ('location') on any failure.
void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    // NOTE(review): graph->layers()[layersIndex] subscripts the Vector POINTER
    // itself (i.e. *(layers + layersIndex)), which is only well-defined for
    // layersIndex == 0 — presumably graph->layers()->size() was intended, as
    // in the branch above. Confirm before relying on layersIndex != 0 here.
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
// Convenience wrappers around the Check* helpers above; each captures the
// caller's file/line/function via CHECK_LOCATION() so ParseExceptions point
// at the call site rather than at the helper.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Builds the parse-dispatch table: one entry per serialized layer type,
// pointing at the member function that deserializes it. The table is sized
// Layer_MAX+1 and pre-filled with ParseUnsupportedLayer so any layer type
// without an explicit registration below fails loudly at parse time.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // MergerLayer is the legacy name for ConcatLayer; presumably kept so older
    // serialized files still load — both route to ParseConcat.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}
233
// Returns the LayerBase table embedded in the layer at 'layerIndex',
// dispatching on the flatbuffers union tag to reach the concrete layer type.
// Input/Output layers carry an extra Bindable wrapper, hence base()->base().
// Throws ParseException if the union tag is Layer_NONE or unrecognized.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap BindableLayerBase, then LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            // Legacy alias of ConcatLayer retained for older serialized files.
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap BindableLayerBase, then LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}
339
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000340std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
341{
342 auto layer = GetBaseLayer(graph, index);
343 assert(layer);
344 return layer->layerName()->str();
345}
346
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000347int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000348{
349 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
350
351 if (layerType == Layer::Layer_InputLayer)
352 {
353 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
354 }
355 else if ( layerType == Layer::Layer_OutputLayer )
356 {
357 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
358 }
359 return 0;
360}
361
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000362armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000363{
364 switch (dataLayout)
365 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000366 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000367 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000368 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000369 default:
370 return armnn::DataLayout::NCHW;
371 }
372}
373
Mike Kellyaf484012019-02-20 16:53:11 +0000374armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
375{
376 switch (function)
377 {
378 case armnnSerializer::ActivationFunction_Sigmoid:
379 return armnn::ActivationFunction::Sigmoid;
380 case armnnSerializer::ActivationFunction_TanH:
381 return armnn::ActivationFunction::TanH;
382 case armnnSerializer::ActivationFunction_Linear:
383 return armnn::ActivationFunction::Linear;
384 case armnnSerializer::ActivationFunction_ReLu:
385 return armnn::ActivationFunction::ReLu;
386 case armnnSerializer::ActivationFunction_BoundedReLu:
387 return armnn::ActivationFunction::BoundedReLu;
388 case armnnSerializer::ActivationFunction_LeakyReLu:
389 return armnn::ActivationFunction::LeakyReLu;
390 case armnnSerializer::ActivationFunction_Abs:
391 return armnn::ActivationFunction::Abs;
392 case armnnSerializer::ActivationFunction_Sqrt:
393 return armnn::ActivationFunction::Sqrt;
394 case armnnSerializer::ActivationFunction_Square:
395 return armnn::ActivationFunction::Square;
396 default:
397 return armnn::ActivationFunction::Sigmoid;
398 }
399}
400
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100401armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
402{
403 switch (method)
404 {
405 case armnnSerializer::ResizeMethod_NearestNeighbor:
406 return armnn::ResizeMethod::NearestNeighbor;
407 case armnnSerializer::ResizeMethod_Bilinear:
408 return armnn::ResizeMethod::NearestNeighbor;
409 default:
410 return armnn::ResizeMethod::NearestNeighbor;
411 }
412}
413
// Converts a serialized TensorInfo into armnn::TensorInfo, carrying over the
// data type, dimensions, and quantization scale/offset.
// Throws ParseException (via CHECK_TENSOR_PTR) for a null tensor, and for a
// data type this deserializer does not recognize.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    // Quantization parameters are stored even for float tensors; TensorInfo
    // ignores them for non-quantized types.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffer dimension vector into contiguous storage for the
    // TensorInfo constructor.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
465
// Converts a serialized constant tensor into an armnn::ConstTensor,
// dispatching on the union tag of the stored payload (byte/short/int/long).
// Each branch checks the payload's element count against the TensorInfo and
// then wraps the flatbuffer data in place — the returned ConstTensor aliases
// 'constTensorPtr's buffer, so the buffer must outlive it.
// Throws ParseException for a null tensor, a size mismatch, or an
// unrecognized payload type.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
508
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000509Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000510 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000511{
512 CHECK_LAYERS(graphPtr, 0, layerIndex);
513 auto layer = GetBaseLayer(graphPtr, layerIndex);
514 const auto& numInputs = layer->inputSlots()->size();
515
516 TensorRawPtrVector result(numInputs);
517
518 for (unsigned int i=0; i<numInputs; ++i)
519 {
520 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
521 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
522 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
523 }
524 return result;
525}
526
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000527Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000528 unsigned int layerIndex)
529{
530 CHECK_LAYERS(graphPtr, 0, layerIndex);
531 auto layer = GetBaseLayer(graphPtr, layerIndex);
532 const auto& numOutputs = layer->outputSlots()->size();
533
534 TensorRawPtrVector result(numOutputs);
535
536 for (unsigned int i=0; i<numOutputs; ++i)
537 {
538 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
539 }
540 return result;
541}
542
Derek Lamberti8ddae332019-02-21 16:29:43 +0000543void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000544{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000545 CHECK_LAYERS(graph, 0, layerIndex);
546 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000547 throw ParseException(
548 boost::str(
549 boost::format("Layer not supported. "
550 "layerIndex: %1% "
551 "layerName: %2% / %3%") %
552 layerIndex %
553 layerName %
554 CHECK_LOCATION().AsString()));
555}
556
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000557void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000558{
559 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000560 m_InputBindings.clear();
561 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000562}
563
// Factory returning an owning raw pointer; callers must release it via
// IDeserializer::Destroy (prefer Create(), which wraps this in a smart
// pointer).
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
568
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000569IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000570{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000571 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000572}
573
// Counterpart of CreateRaw(): deletes 'parser' inside this library so the
// allocation and deallocation use the same heap.
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
578
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000579INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000580{
581 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000582 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
583 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000584}
585
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000586armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000587{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000588 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000589 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
590 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
591 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000592}
593
// Verifies that 'binaryContent' (of 'len' bytes) is a well-formed
// SerializedGraph flatbuffer and returns the typed root pointer into it.
// Throws InvalidArgumentException for a null buffer and ParseException if
// verification fails. The returned GraphPtr aliases 'binaryContent', so the
// caller must keep that buffer alive for as long as the graph is used.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    // Walk the buffer's vtables/offsets before trusting any accessor.
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
612
// Builds an INetwork from the flatbuffer graph in three phases: (1) create every
// non-boundary layer via the registered parser functions, (2) create input/output
// layers, (3) connect producer output slots to consumer input slots.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/output layers are handled separately by SetupInputLayers/SetupOutputLayers.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // Only connect slots that actually have registered consumers.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Transfer ownership of the built network to the caller; m_Network is left moved-from.
    return std::move(m_Network);
}
653
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000654BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000655 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000656{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000657 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000658 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000659 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000660 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000661 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000662 }
663 }
664 throw ParseException(
665 boost::str(
666 boost::format("No input binding found for layer:%1% / %2%") %
667 name %
668 CHECK_LOCATION().AsString()));
669}
670
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000671BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000672 const std::string& name) const
673{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000674 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000675 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000676 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000677 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000678 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000679 }
680 }
681 throw ParseException(
682 boost::str(
683 boost::format("No output binding found for layer:%1% / %2%") %
684 name %
685 CHECK_LOCATION().AsString()));
686}
687
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100688unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
689{
690 for (unsigned int i = 0; i < graph->layers()->size(); i++)
691 {
692 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
693 if (layer->index() == targetIndex)
694 {
695 return i;
696 }
697 }
698 throw ParseException("Layer with given index not found");
699}
700
// Creates an armnn input layer for every input id recorded in the graph and stores
// a (layerName -> BindingPointInfo) pair for GetNetworkInputBindingInfo lookups.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        const unsigned int inputId = graph->inputIds()->Get(i);
        // inputIds hold the layer 'index' property, which must be mapped to a vector position.
        const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // The input layer's single output slot carries the serialized tensor info.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
729
Derek Lamberti8ddae332019-02-21 16:29:43 +0000730void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000731{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000732 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100733 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000734 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100735 m_OutputBindings.reserve(numOutputs);
736
737 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000738 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100739 const unsigned int outputId = graph->outputIds()->Get(i);
740 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
741 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000742
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100743 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
744 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
745 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000746
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100747 IConnectableLayer* outputLayer =
748 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000749
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100750 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
751
752 unsigned int sourceLayerIndex =
753 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
754 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
755 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
756
Derek Lamberti8ddae332019-02-21 16:29:43 +0000757 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100758 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000759 }
760}
761
// Records each of the armnn layer's output slots in m_GraphConnections so they can
// be wired to consumer input slots once all layers have been created.
// Throws ParseException when the serialized slot count disagrees with the armnn layer.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        // Slots are matched by their serialized slot index, not by iteration order.
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
788
// Records each of the armnn layer's input slots in m_GraphConnections, keyed by the
// producing layer/slot taken from the serialized connection info.
// Throws ParseException when the serialized slot count disagrees with the armnn layer.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        // The connection identifies the producer by its serialized layer index and output slot.
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}
815
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000816void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
817 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100818 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000819{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100820 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000821 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100822 m_GraphConnections[sourceLayerIndex] = Connections();
823 }
824
825 Connections& connections = m_GraphConnections[sourceLayerIndex];
826 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
827 {
828 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000829 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000830 else
831 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100832 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000833 }
834}
Kevin May43a799c2019-02-08 16:31:42 +0000835
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000836void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100837 uint32_t outputSlotIndex,
838 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000839{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100840 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
841 {
842 m_GraphConnections[sourceLayerIndex] = Connections();
843 }
844
845 Connections& connections = m_GraphConnections[sourceLayerIndex];
846 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
847 {
848 throw ParseException("Same output slot index processed twice");
849 }
850
851 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000852}
853
Derek Lamberti8ddae332019-02-21 16:29:43 +0000854void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000855{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000856 CHECK_LAYERS(graph, 0, layerIndex);
857 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000858 CHECK_LOCATION();
859 CHECK_VALID_SIZE(inputs.size(), 1);
860
Derek Lamberti8ddae332019-02-21 16:29:43 +0000861 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000862 CHECK_VALID_SIZE(outputs.size(), 1);
863
Derek Lamberti8ddae332019-02-21 16:29:43 +0000864 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000865 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000866 auto serializerDescriptor = serializerLayer->descriptor();
867
868 armnn::ActivationDescriptor descriptor;
869 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
870 descriptor.m_A = serializerDescriptor->a();
871 descriptor.m_B = serializerDescriptor->b();
872
873 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
874 layerName.c_str());
875 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
876 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
877
Derek Lamberti8ddae332019-02-21 16:29:43 +0000878 RegisterInputSlots(graph, layerIndex, layer);
879 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000880}
881
Derek Lamberti8ddae332019-02-21 16:29:43 +0000882void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000883{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000884 CHECK_LAYERS(graph, 0, layerIndex);
885 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000886 CHECK_LOCATION();
887 CHECK_VALID_SIZE(inputs.size(), 2);
888
Derek Lamberti8ddae332019-02-21 16:29:43 +0000889 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000890 CHECK_VALID_SIZE(outputs.size(), 1);
891
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000892 auto layerName = GetLayerName(graph, layerIndex);
893 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000894
895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
897
Derek Lamberti8ddae332019-02-21 16:29:43 +0000898 RegisterInputSlots(graph, layerIndex, layer);
899 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000900}
901
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000902void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
903{
904 CHECK_LAYERS(graph, 0, layerIndex);
905
906 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
907 CHECK_VALID_SIZE(inputs.size(), 1);
908
909 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
910 CHECK_VALID_SIZE(outputs.size(), 1);
911
912 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
913 auto flatBufferCrops = flatBufferDescriptor->crops();
914 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
915
916 if (flatBufferCrops->Length() % 2 != 0)
917 {
918 throw ParseException(boost::str(
919 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
920 }
921
922 std::vector<std::pair<unsigned int, unsigned int>> crops;
923 crops.reserve(flatBufferCrops->Length() / 2);
924 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
925 {
926 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
927 }
928
929 armnn::BatchToSpaceNdDescriptor descriptor;
930 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
931 descriptor.m_BlockShape =
932 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
933 descriptor.m_Crops = crops;
934
935 auto layerName = GetLayerName(graph, layerIndex);
936 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
937
938 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
939 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
940
941 RegisterInputSlots(graph, layerIndex, layer);
942 RegisterOutputSlots(graph, layerIndex, layer);
943}
944
// Deserializes a BatchNormalizationLayer: one input, one output, descriptor with
// epsilon/data layout plus four constant tensors (mean, variance, beta, gamma).
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // The four per-channel statistics/scale tensors are embedded in the flatbuffer.
    armnn::ConstTensor mean     = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta     = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma    = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
981
Conor Kennedy76277882019-02-26 08:29:54 +0000982void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
983{
984 CHECK_LAYERS(graph, 0, layerIndex);
985 CHECK_LOCATION();
986
987 auto outputs = GetOutputs(graph, layerIndex);
988 CHECK_VALID_SIZE(outputs.size(), 1);
989
990 auto layerName = GetLayerName(graph, layerIndex);
991
992 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
993 auto serializerInput = serializerLayer->input();
994
995 armnn::ConstTensor input = ToConstTensor(serializerInput);
996
997 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
998
999 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1000 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1001
1002 RegisterOutputSlots(graph, layerIndex, layer);
1003}
1004
Derek Lamberti8ddae332019-02-21 16:29:43 +00001005void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001006{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001007 CHECK_LAYERS(graph, 0, layerIndex);
1008 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001009 CHECK_LOCATION();
1010 CHECK_VALID_SIZE(inputs.size(), 1);
1011
Derek Lamberti8ddae332019-02-21 16:29:43 +00001012 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001013 CHECK_VALID_SIZE(outputs.size(), 1);
1014
Derek Lamberti8ddae332019-02-21 16:29:43 +00001015 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001016 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001017 auto serializerDescriptor = serializerLayer->descriptor();
1018
1019 armnn::Convolution2dDescriptor descriptor;
1020 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1021 descriptor.m_PadRight = serializerDescriptor->padRight();
1022 descriptor.m_PadTop = serializerDescriptor->padTop();
1023 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1024 descriptor.m_StrideX = serializerDescriptor->strideX();
1025 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001026 descriptor.m_DilationX = serializerDescriptor->dilationX();
1027 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001028 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1029 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1030
1031 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1032 armnn::ConstTensor biases;
1033
Matteo Martincighfc598e12019-05-14 10:36:13 +01001034 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001035 if (descriptor.m_BiasEnabled)
1036 {
1037 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001038 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001039 }
1040 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1041 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001042 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001043 layerName.c_str());
1044 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1045 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1046
Derek Lamberti8ddae332019-02-21 16:29:43 +00001047 RegisterInputSlots(graph, layerIndex, layer);
1048 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001049}
1050
Derek Lamberti8ddae332019-02-21 16:29:43 +00001051void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001052{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001053 CHECK_LAYERS(graph, 0, layerIndex);
1054 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001055 CHECK_LOCATION();
1056 CHECK_VALID_SIZE(inputs.size(), 1);
1057
Derek Lamberti8ddae332019-02-21 16:29:43 +00001058 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001059 CHECK_VALID_SIZE(outputs.size(), 1);
1060
Derek Lamberti8ddae332019-02-21 16:29:43 +00001061 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001062 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001063 auto serializerDescriptor = serializerLayer->descriptor();
1064
1065 armnn::DepthwiseConvolution2dDescriptor descriptor;
1066 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1067 descriptor.m_PadRight = serializerDescriptor->padRight();
1068 descriptor.m_PadTop = serializerDescriptor->padTop();
1069 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1070 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001071 descriptor.m_StrideY = serializerDescriptor->strideY();
1072 descriptor.m_DilationX = serializerDescriptor->dilationX();
1073 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001074 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1075 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1076
1077 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1078 armnn::ConstTensor biases;
1079
Matteo Martincighfc598e12019-05-14 10:36:13 +01001080 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001081 if (descriptor.m_BiasEnabled)
1082 {
1083 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001084 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001085 }
1086 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1087 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001088 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001089 layerName.c_str());
1090
1091 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1092 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1093
Derek Lamberti8ddae332019-02-21 16:29:43 +00001094 RegisterInputSlots(graph, layerIndex, layer);
1095 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001096}
1097
// Deserializes a DetectionPostProcessLayer: two inputs (box encodings, scores),
// four outputs (boxes, classes, scores, num detections), descriptor plus the
// constant anchors tensor.
void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Copy every serialized descriptor field across verbatim.
    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Set tensor info on all four output slots.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1139
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001140void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1141{
1142 CHECK_LAYERS(graph, 0, layerIndex);
1143 auto inputs = GetInputs(graph, layerIndex);
1144 CHECK_LOCATION();
1145 CHECK_VALID_SIZE(inputs.size(), 2);
1146
1147 auto outputs = GetOutputs(graph, layerIndex);
1148 CHECK_VALID_SIZE(outputs.size(), 1);
1149
1150 auto layerName = GetLayerName(graph, layerIndex);
1151 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1152
1153 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1154 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1155
1156 RegisterInputSlots(graph, layerIndex, layer);
1157 RegisterOutputSlots(graph, layerIndex, layer);
1158}
1159
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001160void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1161{
1162 CHECK_LAYERS(graph, 0, layerIndex);
1163 auto inputs = GetInputs(graph, layerIndex);
1164 CHECK_LOCATION();
1165 CHECK_VALID_SIZE(inputs.size(), 2);
1166
1167 auto outputs = GetOutputs(graph, layerIndex);
1168 CHECK_VALID_SIZE(outputs.size(), 1);
1169
1170 auto layerName = GetLayerName(graph, layerIndex);
1171 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1172
1173 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1174 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1175
1176 RegisterInputSlots(graph, layerIndex, layer);
1177 RegisterOutputSlots(graph, layerIndex, layer);
1178}
1179
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001180void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1181{
1182 CHECK_LAYERS(graph, 0, layerIndex);
1183 auto inputs = GetInputs(graph, layerIndex);
1184 CHECK_LOCATION();
1185 CHECK_VALID_SIZE(inputs.size(), 2);
1186
1187 auto outputs = GetOutputs(graph, layerIndex);
1188 CHECK_VALID_SIZE(outputs.size(), 1);
1189
1190 auto layerName = GetLayerName(graph, layerIndex);
1191 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1192
1193 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1194 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1195
1196 RegisterInputSlots(graph, layerIndex, layer);
1197 RegisterOutputSlots(graph, layerIndex, layer);
1198}
1199
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001200void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1201{
1202 CHECK_LAYERS(graph, 0, layerIndex);
1203
1204 auto inputs = GetInputs(graph, layerIndex);
1205 CHECK_VALID_SIZE(inputs.size(), 1);
1206
1207 auto outputs = GetOutputs(graph, layerIndex);
1208 CHECK_VALID_SIZE(outputs.size(), 1);
1209 auto outputInfo = ToTensorInfo(outputs[0]);
1210
1211 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1212 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1213
1214 auto layerName = GetLayerName(graph, layerIndex);
1215 armnn::L2NormalizationDescriptor descriptor;
1216 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001217 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001218
1219 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1220 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1221
1222 RegisterInputSlots(graph, layerIndex, layer);
1223 RegisterOutputSlots(graph, layerIndex, layer);
1224}
1225
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001226void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1227{
1228 CHECK_LAYERS(graph, 0, layerIndex);
1229 auto inputs = GetInputs(graph, layerIndex);
1230 CHECK_LOCATION();
1231 CHECK_VALID_SIZE(inputs.size(), 2);
1232
1233 auto outputs = GetOutputs(graph, layerIndex);
1234 CHECK_VALID_SIZE(outputs.size(), 1);
1235
1236 auto layerName = GetLayerName(graph, layerIndex);
1237 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1238
1239 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1240 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1241
1242 RegisterInputSlots(graph, layerIndex, layer);
1243 RegisterOutputSlots(graph, layerIndex, layer);
1244}
1245
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001246void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1247{
1248 CHECK_LAYERS(graph, 0, layerIndex);
1249 auto inputs = GetInputs(graph, layerIndex);
1250 CHECK_LOCATION();
1251 CHECK_VALID_SIZE(inputs.size(), 2);
1252
1253 auto outputs = GetOutputs(graph, layerIndex);
1254 CHECK_VALID_SIZE(outputs.size(), 1);
1255
1256 auto layerName = GetLayerName(graph, layerIndex);
1257 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1258
1259 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1260 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1261
1262 RegisterInputSlots(graph, layerIndex, layer);
1263 RegisterOutputSlots(graph, layerIndex, layer);
1264}
1265
Jim Flynne242f2d2019-05-22 14:24:13 +01001266const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1267 unsigned int layerIndex)
1268{
1269 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1270
1271 switch (layerType)
1272 {
1273 case Layer::Layer_ConcatLayer:
1274 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1275 case Layer::Layer_MergerLayer:
1276 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1277 default:
1278 throw armnn::Exception("unknown layer type, should be concat or merger");
1279 }
1280}
1281
Jim Flynn906f9462019-05-10 13:55:21 +01001282void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001283{
1284 CHECK_LAYERS(graph, 0, layerIndex);
1285 CHECK_LOCATION();
1286
1287 auto outputs = GetOutputs(graph, layerIndex);
1288 CHECK_VALID_SIZE(outputs.size(), 1);
1289
Jim Flynnac25a1b2019-02-28 10:40:49 +00001290 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001291 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1292 unsigned int numViews = originsDescriptor->numViews();
1293 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001294
1295 // can now check the number of inputs == number of views
1296 auto inputs = GetInputs(graph, layerIndex);
1297 CHECK_VALID_SIZE(inputs.size(), numViews);
1298
1299 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001300 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001301 for (unsigned int v = 0; v < numViews; ++v)
1302 {
1303 auto originPtr = originsPtr->Get(v);
1304 for (unsigned int d = 0; d < numDimensions; ++d)
1305 {
1306 uint32_t value = originPtr->data()->Get(d);
1307 descriptor.SetViewOriginCoord(v, d, value);
1308 }
1309 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001310 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001311
Jim Flynn906f9462019-05-10 13:55:21 +01001312 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001313 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1314 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1315
1316 RegisterInputSlots(graph, layerIndex, layer);
1317 RegisterOutputSlots(graph, layerIndex, layer);
1318}
1319
Derek Lamberti8ddae332019-02-21 16:29:43 +00001320void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001321{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001322 CHECK_LAYERS(graph, 0, layerIndex);
1323 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001324 CHECK_LOCATION();
1325 CHECK_VALID_SIZE(inputs.size(), 2);
1326
Derek Lamberti8ddae332019-02-21 16:29:43 +00001327 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001328 CHECK_VALID_SIZE(outputs.size(), 1);
1329
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001330 auto layerName = GetLayerName(graph, layerIndex);
1331 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001332
1333 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1334 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1335
Derek Lamberti8ddae332019-02-21 16:29:43 +00001336 RegisterInputSlots(graph, layerIndex, layer);
1337 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001338}
1339
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001340void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1341{
1342 CHECK_LAYERS(graph, 0, layerIndex);
1343 CHECK_LOCATION();
1344
1345 auto inputs = GetInputs(graph, layerIndex);
1346 CHECK_VALID_SIZE(inputs.size(), 1);
1347
1348 auto outputs = GetOutputs(graph, layerIndex);
1349 CHECK_VALID_SIZE(outputs.size(), 1);
1350
1351 auto layerName = GetLayerName(graph, layerIndex);
1352
1353 armnn::IConnectableLayer* layer;
1354
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001355 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001356
1357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1359
1360 RegisterInputSlots(graph, layerIndex, layer);
1361 RegisterOutputSlots(graph, layerIndex, layer);
1362}
1363
Derek Lamberti8ddae332019-02-21 16:29:43 +00001364void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001365{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001366 CHECK_LAYERS(graph, 0, layerIndex);
1367 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001368 CHECK_LOCATION();
1369 CHECK_VALID_SIZE(inputs.size(), 1);
1370
Derek Lamberti8ddae332019-02-21 16:29:43 +00001371 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001372 CHECK_VALID_SIZE(outputs.size(), 1);
1373
Derek Lamberti8ddae332019-02-21 16:29:43 +00001374 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001375 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001376 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1377
1378 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1379 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1380 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1381
1382 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1383
1384 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001385 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001386 if (flatBufferDescriptor->biasEnabled())
1387 {
1388 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001389 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001390 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001391 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1392 weightsTensor,
1393 optionalBiases,
1394 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001395
1396 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1397 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1398
Derek Lamberti8ddae332019-02-21 16:29:43 +00001399 RegisterInputSlots(graph, layerIndex, layer);
1400 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001401}
1402
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001403void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1404{
1405 CHECK_LAYERS(graph, 0, layerIndex);
1406
1407 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1408 CHECK_VALID_SIZE(inputs.size(), 1);
1409
1410 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1411 CHECK_VALID_SIZE(outputs.size(), 1);
1412
1413 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1414 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001415 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001416
1417 if (flatBufferPadList->Length() % 2 != 0)
1418 {
1419 throw ParseException(boost::str(
1420 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1421 }
1422
1423 std::vector<std::pair<unsigned int, unsigned int>> padList;
1424 padList.reserve(flatBufferPadList->Length() / 2);
1425 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1426 {
1427 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1428 }
1429
David Monahan34757812019-06-19 11:47:21 +01001430 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001431
1432 auto layerName = GetLayerName(graph, layerIndex);
1433 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1434
1435 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1436 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1437
1438 RegisterInputSlots(graph, layerIndex, layer);
1439 RegisterOutputSlots(graph, layerIndex, layer);
1440}
1441
Derek Lamberti8ddae332019-02-21 16:29:43 +00001442void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001443{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001444 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001445
1446 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001447 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001448
Derek Lamberti8ddae332019-02-21 16:29:43 +00001449 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001450 CHECK_VALID_SIZE(inputs.size(), 1);
1451
Derek Lamberti8ddae332019-02-21 16:29:43 +00001452 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001453 CHECK_VALID_SIZE(outputs.size(), 1);
1454 auto outputInfo = ToTensorInfo(outputs[0]);
1455
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001456 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001457 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1458
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001459 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001460 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1461
Derek Lamberti8ddae332019-02-21 16:29:43 +00001462 RegisterInputSlots(graph, layerIndex, layer);
1463 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001464}
1465
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001466armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001467 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001468{
1469 armnn::Pooling2dDescriptor desc;
1470
1471 switch (pooling2dDesc->poolType())
1472 {
1473 case PoolingAlgorithm_Average:
1474 {
1475 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001476 break;
1477 }
1478 case PoolingAlgorithm_Max:
1479 {
1480 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001481 break;
1482 }
1483 default:
1484 {
1485 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1486 }
1487 }
1488
1489 switch (pooling2dDesc->outputShapeRounding())
1490 {
1491 case OutputShapeRounding_Floor:
1492 {
1493 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1494 break;
1495 }
1496 case OutputShapeRounding_Ceiling:
1497 {
1498 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1499 break;
1500 }
1501 default:
1502 {
1503 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1504 }
1505 }
1506
1507 switch (pooling2dDesc->paddingMethod())
1508 {
1509 case PaddingMethod_Exclude:
1510 {
1511 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1512 break;
1513 }
1514 case PaddingMethod_IgnoreValue:
1515 {
1516 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1517 break;
1518 }
1519 default:
1520 {
1521 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1522 }
1523 }
1524
1525 switch (pooling2dDesc->dataLayout())
1526 {
1527 case DataLayout_NCHW:
1528 {
1529 desc.m_DataLayout = armnn::DataLayout::NCHW;
1530 break;
1531 }
1532 case DataLayout_NHWC:
1533 {
1534 desc.m_DataLayout = armnn::DataLayout::NHWC;
1535 break;
1536 }
1537 default:
1538 {
1539 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1540 }
1541 }
1542
1543 desc.m_PadRight = pooling2dDesc->padRight();
1544 desc.m_PadLeft = pooling2dDesc->padLeft();
1545 desc.m_PadBottom = pooling2dDesc->padBottom();
1546 desc.m_PadTop = pooling2dDesc->padTop();
1547 desc.m_StrideX = pooling2dDesc->strideX();
1548 desc.m_StrideY = pooling2dDesc->strideY();
1549 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1550 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1551
1552 return desc;
1553}
1554
Derek Lamberti8ddae332019-02-21 16:29:43 +00001555void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001556{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001557 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001558
Derek Lamberti8ddae332019-02-21 16:29:43 +00001559 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001560 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001561 CHECK_VALID_SIZE(inputs.size(), 1);
1562
Derek Lamberti8ddae332019-02-21 16:29:43 +00001563 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001564 CHECK_VALID_SIZE(outputs.size(), 1);
1565 auto outputInfo = ToTensorInfo(outputs[0]);
1566
1567 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001568 auto layerName = GetLayerName(graph, layerIndex);
1569 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001570 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1571
Derek Lamberti8ddae332019-02-21 16:29:43 +00001572 RegisterInputSlots(graph, layerIndex, layer);
1573 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001574}
1575
Derek Lamberti87acb272019-03-27 16:51:31 +00001576void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1577{
1578 CHECK_LAYERS(graph, 0, layerIndex);
1579
1580 auto inputs = GetInputs(graph, layerIndex);
1581 CHECK_VALID_SIZE(inputs.size(), 1);
1582
1583 auto outputs = GetOutputs(graph, layerIndex);
1584 CHECK_VALID_SIZE(outputs.size(), 1);
1585 auto outputInfo = ToTensorInfo(outputs[0]);
1586
1587 auto layerName = GetLayerName(graph, layerIndex);
1588 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1589 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1590
1591 RegisterInputSlots(graph, layerIndex, layer);
1592 RegisterOutputSlots(graph, layerIndex, layer);
1593}
1594
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001595armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001596 const std::vector<uint32_t>& targetDimsIn)
1597{
1598 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1599 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1600
1601 if (stretchDim != targetDimsIn.end())
1602 {
1603 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1604 {
1605 throw ParseException(boost::str(
1606 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1607 }
1608
1609 auto targetNumElements =
1610 boost::numeric_cast<unsigned int>(
1611 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1612
1613 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1614 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1615 }
1616
1617 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1618
1619 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1620 reshapeInfo.SetShape(outputShape);
1621
1622 return reshapeInfo;
1623}
1624
Derek Lamberti8ddae332019-02-21 16:29:43 +00001625void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001626{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001627 CHECK_LAYERS(graph, 0, layerIndex);
1628 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001629
Derek Lamberti8ddae332019-02-21 16:29:43 +00001630 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001631 CHECK_VALID_SIZE(outputs.size(), 1);
1632
1633 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1634 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1635
Derek Lamberti8ddae332019-02-21 16:29:43 +00001636 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001637 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1638
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001639 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001640 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1641
1642 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1643 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
1644
1645 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
1646 {
1647 std::stringstream ss;
1648 ss << "New shape defined in reshape parameters "
1649 << reshapeOutputTensorShape
1650 << " does not equal output shape "
1651 << actualOutputTensorInfo.GetShape()
1652 << ": "
1653 << CHECK_LOCATION().AsString();
1654 throw ParseException(ss.str());
1655 }
1656
1657 armnn::ReshapeDescriptor reshapeDesc;
1658 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
1659
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001660 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001661 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
1662 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
1663
Derek Lamberti8ddae332019-02-21 16:29:43 +00001664 RegisterInputSlots(graph, layerIndex, layer);
1665 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001666}
1667
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001668void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1669{
1670 CHECK_LAYERS(graph, 0, layerIndex);
1671
1672 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1673 CHECK_VALID_SIZE(inputs.size(), 1);
1674
1675 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1676 CHECK_VALID_SIZE(outputs.size(), 1);
1677
1678 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1679
1680 armnn::ResizeDescriptor descriptor;
1681 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1682 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1683 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1684 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1685
1686 auto layerName = GetLayerName(graph, layerIndex);
1687 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1688
1689 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1690 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1691
1692 RegisterInputSlots(graph, layerIndex, layer);
1693 RegisterOutputSlots(graph, layerIndex, layer);
1694}
1695
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001696void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1697{
1698 CHECK_LAYERS(graph, 0, layerIndex);
1699
1700 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1701 CHECK_VALID_SIZE(inputs.size(), 1);
1702
1703 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1704 CHECK_VALID_SIZE(outputs.size(), 1);
1705
1706 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1707
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001708 armnn::ResizeDescriptor descriptor;
1709 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001710 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001711 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
1712 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001713
1714 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001715 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001716
1717 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1718 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1719
1720 RegisterInputSlots(graph, layerIndex, layer);
1721 RegisterOutputSlots(graph, layerIndex, layer);
1722}
1723
Derek Lamberti8ddae332019-02-21 16:29:43 +00001724void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001725{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001726 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001727
Derek Lamberti8ddae332019-02-21 16:29:43 +00001728 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001729 CHECK_VALID_SIZE(inputs.size(), 1);
1730
Derek Lamberti8ddae332019-02-21 16:29:43 +00001731 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001732 CHECK_VALID_SIZE(outputs.size(), 1);
1733
1734 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001735 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001736 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001737
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001738 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1739
1740 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1741 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1742
Derek Lamberti8ddae332019-02-21 16:29:43 +00001743 RegisterInputSlots(graph, layerIndex, layer);
1744 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001745}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001746
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001747void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1748{
1749 CHECK_LAYERS(graph, 0, layerIndex);
1750
1751 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1752 CHECK_VALID_SIZE(inputs.size(), 1);
1753
1754 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1755 CHECK_VALID_SIZE(outputs.size(), 1);
1756
1757 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1758 auto flatBufferPadList = flatBufferDescriptor->padList();
1759 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1760
1761 if (flatBufferPadList->Length() % 2 != 0)
1762 {
1763 throw ParseException(boost::str(
1764 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1765 }
1766
1767 std::vector<std::pair<unsigned int, unsigned int>> padList;
1768 padList.reserve(flatBufferPadList->Length() / 2);
1769 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1770 {
1771 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1772 }
1773
1774 armnn::SpaceToBatchNdDescriptor descriptor;
1775 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1776 descriptor.m_BlockShape =
1777 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1778 descriptor.m_PadList = padList;
1779
1780 auto layerName = GetLayerName(graph, layerIndex);
1781 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1782
1783 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1784 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1785
1786 RegisterInputSlots(graph, layerIndex, layer);
1787 RegisterOutputSlots(graph, layerIndex, layer);
1788}
1789
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001790void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1791{
1792 CHECK_LAYERS(graph, 0, layerIndex);
1793
1794 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1795 CHECK_VALID_SIZE(inputs.size(), 1);
1796
1797 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1798 CHECK_VALID_SIZE(outputs.size(), 1);
1799
1800 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1801
1802 armnn::SpaceToDepthDescriptor descriptor;
1803 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1804 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1805
1806 auto layerName = GetLayerName(graph, layerIndex);
1807 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1808
1809 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1810 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1811
1812 RegisterInputSlots(graph, layerIndex, layer);
1813 RegisterOutputSlots(graph, layerIndex, layer);
1814}
1815
Nina Drozd57728782019-02-27 10:53:27 +00001816armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1817 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1818 unsigned int layerIndex)
1819{
1820 armnn::NormalizationDescriptor desc;
1821
1822 switch (normalizationDescriptor->normChannelType())
1823 {
1824 case NormalizationAlgorithmChannel_Across:
1825 {
1826 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1827 break;
1828 }
1829 case NormalizationAlgorithmChannel_Within:
1830 {
1831 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1832 break;
1833 }
1834 default:
1835 {
1836 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1837 }
1838 }
1839
1840 switch (normalizationDescriptor->normMethodType())
1841 {
1842 case NormalizationAlgorithmMethod_LocalBrightness:
1843 {
1844 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1845 break;
1846 }
1847 case NormalizationAlgorithmMethod_LocalContrast:
1848 {
1849 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1850 break;
1851 }
1852 default:
1853 {
1854 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1855 }
1856 }
1857
1858 switch (normalizationDescriptor->dataLayout())
1859 {
1860 case DataLayout_NCHW:
1861 {
1862 desc.m_DataLayout = armnn::DataLayout::NCHW;
1863 break;
1864 }
1865 case DataLayout_NHWC:
1866 {
1867 desc.m_DataLayout = armnn::DataLayout::NHWC;
1868 break;
1869 }
1870 default:
1871 {
1872 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1873 }
1874 }
1875
1876 desc.m_Alpha = normalizationDescriptor->alpha();
1877 desc.m_Beta = normalizationDescriptor->beta();
1878 desc.m_K = normalizationDescriptor->k();
1879 desc.m_NormSize = normalizationDescriptor->normSize();
1880
1881 return desc;
1882}
1883
1884void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1885{
1886 CHECK_LAYERS(graph, 0, layerIndex);
1887
1888 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1889
1890 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1891 CHECK_VALID_SIZE(inputs.size(), 1);
1892
1893 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1894 CHECK_VALID_SIZE(outputs.size(), 1);
1895
1896 auto outputInfo = ToTensorInfo(outputs[0]);
1897
1898 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1899 auto layerName = GetLayerName(graph, layerIndex);
1900
1901 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1902 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1903
1904 RegisterInputSlots(graph, layerIndex, layer);
1905 RegisterOutputSlots(graph, layerIndex, layer);
1906}
1907
Sadik Armagan8b42a382019-03-01 14:24:49 +00001908void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1909{
1910 CHECK_LAYERS(graph, 0, layerIndex);
1911 auto inputs = GetInputs(graph, layerIndex);
1912 CHECK_LOCATION();
1913 CHECK_VALID_SIZE(inputs.size(), 1);
1914
1915 auto outputs = GetOutputs(graph, layerIndex);
1916 CHECK_VALID_SIZE(outputs.size(), 1);
1917
1918 auto layerName = GetLayerName(graph, layerIndex);
1919 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1920
1921 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1922 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1923
1924 RegisterInputSlots(graph, layerIndex, layer);
1925 RegisterOutputSlots(graph, layerIndex, layer);
1926}
1927
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001928void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1929{
1930 CHECK_LAYERS(graph, 0, layerIndex);
1931
1932 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1933 CHECK_VALID_SIZE(inputs.size(), 1);
1934
1935 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1936 CHECK_VALID_SIZE(outputs.size(), 1);
1937
1938 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1939
1940 auto flatBufferBegin = flatBufferDescriptor->begin();
1941 auto flatBufferEnd = flatBufferDescriptor->end();
1942 auto flatBufferStride = flatBufferDescriptor->stride();
1943
1944 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1945 flatBufferBegin->Length() == flatBufferStride->Length()))
1946 {
1947 throw ParseException(boost::str(
1948 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1949 }
1950
1951 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1952 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1953 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1954
1955 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1956 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1957 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1958 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1959 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1960 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1961 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1962
1963 auto layerName = GetLayerName(graph, layerIndex);
1964 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1965
1966 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1967 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1968
1969 RegisterInputSlots(graph, layerIndex, layer);
1970 RegisterOutputSlots(graph, layerIndex, layer);
1971}
1972
Conor Kennedyda1f9752019-03-01 14:37:12 +00001973void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1974{
1975 CHECK_LAYERS(graph, 0, layerIndex);
1976 auto inputs = GetInputs(graph, layerIndex);
1977 CHECK_LOCATION();
1978 CHECK_VALID_SIZE(inputs.size(), 2);
1979
1980 auto outputs = GetOutputs(graph, layerIndex);
1981 CHECK_VALID_SIZE(outputs.size(), 1);
1982
1983 auto layerName = GetLayerName(graph, layerIndex);
1984 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1985
1986 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1987 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1988
1989 RegisterInputSlots(graph, layerIndex, layer);
1990 RegisterOutputSlots(graph, layerIndex, layer);
1991}
1992
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001993void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1994{
1995 CHECK_LAYERS(graph, 0, layerIndex);
1996
1997 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1998 CHECK_VALID_SIZE(inputs.size(), 2);
1999
2000 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2001 CHECK_VALID_SIZE(outputs.size(), 1);
2002
2003 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002004 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2005
2006 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002007 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2008
2009 RegisterInputSlots(graph, layerIndex, layer);
2010 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002011}
2012
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002013void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2014{
2015 CHECK_LAYERS(graph, 0, layerIndex);
2016
2017 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2018 CHECK_VALID_SIZE(inputs.size(), 1);
2019
2020 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2021 CHECK_VALID_SIZE(outputs.size(), 1);
2022
2023 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2024 auto flatBufferAxis = flatBufferDescriptor->axis();
2025 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2026
2027 armnn::MeanDescriptor descriptor;
2028 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2029 descriptor.m_KeepDims = flatBufferKeepDims;
2030
2031 auto layerName = GetLayerName(graph, layerIndex);
2032 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2033
2034 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2035 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2036
2037 RegisterInputSlots(graph, layerIndex, layer);
2038 RegisterOutputSlots(graph, layerIndex, layer);
2039}
2040
Jim Flynn18ce3382019-03-08 11:08:30 +00002041void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2042{
2043 CHECK_LAYERS(graph, 0, layerIndex);
2044
2045 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2046 CHECK_VALID_SIZE(inputs.size(), 1);
2047
2048 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2049
2050 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2051 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2052 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2053 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2054 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2055 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2056
2057 // Check numViews and numDimensions corresponds to the ones already serialized ...
2058 // numViews == flatBufferViewSizes.size();
2059 // foreach: numDimensions == flatBufferViewSizes[x].size();
2060
2061 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2062 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2063 {
2064 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2065 {
2066 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2067 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2068 }
2069 }
2070
2071 auto layerName = GetLayerName(graph, layerIndex);
2072 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2073
2074 // I could have as many outputs as views ...
2075 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2076 {
2077 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2078 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2079 }
2080
2081 RegisterInputSlots(graph, layerIndex, layer);
2082 RegisterOutputSlots(graph, layerIndex, layer);
2083}
2084
Jim Flynn11af3752019-03-19 17:22:29 +00002085armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2086{
2087 armnn::LstmDescriptor desc;
2088
2089 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2090 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2091 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2092 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2093 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2094 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002095 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002096
2097 return desc;
2098}
2099
// Deserializes an LSTM layer: 3 inputs, 4 outputs, plus a descriptor and a set
// of constant weight/bias tensors. Which optional tensors are present depends
// on the descriptor flags (CIFG, projection, peephole, layer normalization).
//
// NOTE: LstmInputParams stores raw pointers to the local ConstTensor objects
// below, so AddLstmLayer must be (and is) called before they go out of scope.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters: always present regardless of descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate tensors: only serialized when CIFG (coupled input/forget gate)
    // is disabled. Declared at function scope so the pointers stored in
    // lstmInputParams stay valid until AddLstmLayer is called.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection tensors: only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole tensors: only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization tensors: only present when layer norm is enabled.
    // The input-gate norm weights are additionally gated on CIFG being off,
    // since with CIFG there is no separate input gate to normalize.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four output slots, each with its own tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2215
// Deserializes a QuantizedLstm layer: 3 inputs, 2 outputs, and a full set of
// mandatory quantized weight/bias tensors (no descriptor; all twelve tensors
// are always present for this layer type).
//
// NOTE: QuantizedLstmInputParams stores raw pointers to the local ConstTensor
// objects below, so AddQuantizedLstmLayer must be (and is) called before they
// go out of scope.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // All tensors are mandatory for the quantized LSTM.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Two output slots, each with its own tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2269
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002270void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2271{
2272 CHECK_LAYERS(graph, 0, layerIndex);
2273
2274 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2275 CHECK_VALID_SIZE(inputs.size(), 1);
2276
2277 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2278 CHECK_VALID_SIZE(outputs.size(), 1);
2279
2280 const std::string layerName = GetLayerName(graph, layerIndex);
2281 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2282
2283 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2284 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2285
2286 RegisterInputSlots(graph, layerIndex, layer);
2287 RegisterOutputSlots(graph, layerIndex, layer);
2288}
2289
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002290void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2291{
2292 CHECK_LAYERS(graph, 0, layerIndex);
2293
2294 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2295 CHECK_VALID_SIZE(inputs.size(), 2);
2296
2297 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2298 CHECK_VALID_SIZE(outputs.size(), 1);
2299
2300 const std::string layerName = GetLayerName(graph, layerIndex);
2301 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2302
2303 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2304 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2305
2306 RegisterInputSlots(graph, layerIndex, layer);
2307 RegisterOutputSlots(graph, layerIndex, layer);
2308}
2309
Sadik Armaganeff363d2019-04-05 15:25:46 +01002310void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2311{
2312 CHECK_LAYERS(graph, 0, layerIndex);
2313 auto inputs = GetInputs(graph, layerIndex);
2314 CHECK_LOCATION();
2315 CHECK_VALID_SIZE(inputs.size(), 2);
2316
2317 auto outputs = GetOutputs(graph, layerIndex);
2318 CHECK_VALID_SIZE(outputs.size(), 2);
2319
2320 auto layerName = GetLayerName(graph, layerIndex);
2321 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2322
2323 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2324 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2325
2326 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2327 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2328
2329 RegisterInputSlots(graph, layerIndex, layer);
2330 RegisterOutputSlots(graph, layerIndex, layer);
2331}
2332
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002333void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2334{
2335 CHECK_LAYERS(graph, 0, layerIndex);
2336 auto inputs = GetInputs(graph, layerIndex);
2337 CHECK_LOCATION();
2338 CHECK_VALID_SIZE(inputs.size(), 2);
2339
2340 auto outputs = GetOutputs(graph, layerIndex);
2341 CHECK_VALID_SIZE(outputs.size(), 1);
2342
2343 auto layerName = GetLayerName(graph, layerIndex);
2344 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2345
2346 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2347 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2348
2349 RegisterInputSlots(graph, layerIndex, layer);
2350 RegisterOutputSlots(graph, layerIndex, layer);
2351}
2352
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002353void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2354{
2355 CHECK_LAYERS(graph, 0, layerIndex);
2356
2357 auto inputs = GetInputs(graph, layerIndex);
2358 CHECK_VALID_SIZE(inputs.size(), 1);
2359
2360 auto outputs = GetOutputs(graph, layerIndex);
2361 CHECK_VALID_SIZE(outputs.size(), 1);
2362
2363 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2364 auto layerName = GetLayerName(graph, layerIndex);
2365 auto serializerDescriptor = serializerLayer->descriptor();
2366
2367 armnn::TransposeConvolution2dDescriptor descriptor;
2368 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2369 descriptor.m_PadRight = serializerDescriptor->padRight();
2370 descriptor.m_PadTop = serializerDescriptor->padTop();
2371 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2372 descriptor.m_StrideX = serializerDescriptor->strideX();
2373 descriptor.m_StrideY = serializerDescriptor->strideY();;
2374 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2375 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2376
2377 // weights & biases
2378 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2379 armnn::Optional<armnn::ConstTensor> optionalBiases;
2380 if (descriptor.m_BiasEnabled)
2381 {
2382 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2383 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2384 }
2385
2386 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2387 weights,
2388 optionalBiases,
2389 layerName.c_str());
2390
2391 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2392 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2393
2394 RegisterInputSlots(graph, layerIndex, layer);
2395 RegisterOutputSlots(graph, layerIndex, layer);
2396}
2397
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002398void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2399{
2400 CHECK_LAYERS(graph, 0, layerIndex);
2401 auto inputs = GetInputs(graph, layerIndex);
2402
2403 auto outputs = GetOutputs(graph, layerIndex);
2404 CHECK_VALID_SIZE(outputs.size(), 1);
2405
2406 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2407 unsigned int axis = flatBufferDescriptor->axis();
2408 unsigned int numInputs = flatBufferDescriptor->numInputs();
2409 CHECK_VALID_SIZE(inputs.size(), numInputs);
2410
2411 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2412 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2413 flatBufferInputShape->begin() + flatBufferInputShape->size());
2414
2415 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2416 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2417
2418 for (unsigned int i=0; i<inputs.size(); ++i)
2419 {
2420 armnn::TensorShape& inputShape = ToTensorInfo(inputs[i]).GetShape();
2421 if (descriptor.m_InputShape != inputShape)
2422 {
2423 std::stringstream ss;
2424 ss << "Shape of input "
2425 << i
2426 << " "
2427 << inputShape
2428 << " does not equal defined input shape "
2429 << descriptor.m_InputShape
2430 << ": "
2431 << CHECK_LOCATION().AsString();
2432 throw ParseException(ss.str());
2433 }
2434 }
2435
2436 auto layerName = GetLayerName(graph, layerIndex);
2437 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2438
2439 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2440 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2441
2442 RegisterInputSlots(graph, layerIndex, layer);
2443 RegisterOutputSlots(graph, layerIndex, layer);
2444}
2445
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002446} // namespace armnnDeserializer