//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>
#include <boost/polymorphic_cast.hpp>

// The generated code based on the Serialize schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

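// Convenience wrappers around the checks above. Each macro captures CHECK_LOCATION()
// at its call site, so the resulting ParseException reports the function and the
// file/line that actually triggered the validation failure.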
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

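// The constructor below builds a dispatch table indexed by the armnnSerializer::Layer
// enum value: every known layer type maps to a Parse* member function, and anything else
// falls back to ParseUnsupportedLayer. CreateNetworkFromGraph later walks the serialized
// graph and invokes the registered handler for each layer that is not a pure input or output.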
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseMerger;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
}

Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

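// Note: armnn::ConstTensor does not copy the payload; the tensors returned below are
// non-owning views over the flatbuffer data, so the serialized graph must stay alive
// while the network is being built.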
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);

        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

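// A minimal client-side usage sketch (not part of this translation unit). It assumes the
// IDeserializer interface exposes the methods implemented below; "model.armnn" and
// "input" are placeholder names used for illustration only:
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream file("model.armnn", std::ios::binary);
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//     auto inputBinding = parser->GetNetworkInputBindingInfo(0, "input"); // {LayerBindingId, TensorInfo}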
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

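// CreateNetworkFromGraph builds the armnn::INetwork in two passes: first every layer
// (other than the pure input/output layers, which SetupInputLayers/SetupOutputLayers
// handle) is created via its registered Parse* function and its slots are recorded in
// m_GraphConnections; then the recorded output slots are connected to the input slots
// that reference them.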
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionsIndex = 0; connectionsIndex < m_GraphConnections[0].size(); ++connectionsIndex)
    {
        SlotsMap& slotsMap = m_GraphConnections[0][connectionsIndex];
        for (unsigned int outputSlotIndex = 0; outputSlotIndex < slotsMap.outputSlots.size(); outputSlotIndex++)
        {
            if (slotsMap.inputSlots.find(outputSlotIndex) != slotsMap.inputSlots.end())
            {
                for (armnn::IInputSlot* inputSlot : slotsMap.inputSlots[outputSlotIndex])
                {
                    slotsMap.outputSlots[outputSlotIndex]->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        auto fbConnection = parsedLayer->inputSlots()->Get(slotIndex)->connection();
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));

        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
                                                 uint32_t outputSlotIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);

    SlotsMap& slotsMap = m_GraphConnections[0][sourceLayerIndex];
    if (slotsMap.inputSlots.find(outputSlotIndex) == slotsMap.inputSlots.end())
    {
        slotsMap.inputSlots[outputSlotIndex] = {slot};
    }
    else
    {
        slotsMap.inputSlots[outputSlotIndex].push_back(slot);
    }
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
    m_GraphConnections[0][sourceLayerIndex].outputSlots.push_back(slot);
}

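// The Parse* handlers below all follow the same pattern: validate the layer index,
// fetch the layer's input and output tensor infos from the flatbuffer graph, translate
// the serialized descriptor (and any constant tensors) into its armnn equivalent, add
// the layer to m_Network, set the output TensorInfo, and register the layer's slots so
// CreateNetworkFromGraph can wire the connections afterwards.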
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    auto layerName = GetLayerName(graph, layerIndex);
    armnn::L2NormalizationDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMerger(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto mergerLayer = graph->layers()->Get(layerIndex)->layer_as_MergerLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto mergerDescriptor = mergerLayer->descriptor();
    unsigned int numViews = mergerDescriptor->numViews();
    unsigned int numDimensions = mergerDescriptor->numDimensions();

    // Check that the number of inputs matches the number of views in the descriptor.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = mergerDescriptor->viewOrigins();
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(mergerDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddMergerLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
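
// A worked example of the view-origin mapping rebuilt above (a sketch, not values read from a
// model): concatenating two NCHW tensors of shape [1, 2, 4, 4] along the channel axis gives
// numViews = 2, numDimensions = 4, concatAxis = 1, view 0 at origin { 0, 0, 0, 0 } and view 1
// at origin { 0, 2, 0, 0 }. SetViewOriginCoord() simply copies each coordinate into the
// OriginsDescriptor one (view, dimension) pair at a time.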

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
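
// The FullyConnected weights (and the optional bias) above are decoded into ConstTensors and
// handed straight to AddFullyConnectedLayer, so they never appear as extra graph inputs; the
// layer only expects the single activation input checked by CHECK_VALID_SIZE. The
// m_TransposeWeightMatrix flag is copied through from the flatbuffer descriptor unchanged, and
// interpreting it is left to whichever backend eventually runs the layer.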

void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::PadDescriptor descriptor(padList);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
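
// The serialized pad list is a flat sequence of 2 * rank values which the loop above regroups
// into one (padBefore, padAfter) pair per dimension. As an illustration (values invented, not
// taken from a real model), the flat list { 0, 0, 0, 0, 1, 1, 2, 2 } for a rank-4 NCHW tensor
// becomes { {0, 0}, {0, 0}, {1, 1}, {2, 2} }: no padding on N or C, one element before and
// after H, and two before and after W.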

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
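
// The default branches in the switches above only trigger for enum values this version of the
// schema does not recognise. BOOST_ASSERT_MSG is compiled out in release (NDEBUG) builds, so in
// that case the corresponding descriptor field would silently keep its default-constructed
// value rather than raising an error.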

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
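
// Worked example of the stretch-dimension handling above (values invented): for an input
// TensorInfo with 12 elements and targetDimsIn = { 3, -1, 2 }, the -1 entry (stored as
// 0xFFFFFFFF in the uint32_t vector) marks the dimension to infer. The accumulate call starts
// at -1 and multiplies every entry, so the product is (-1) * 3 * (-1) * 2 = 6 known elements,
// and the stretch dimension becomes 12 / 6 = 2, giving an output shape of { 3, 2, 2 }.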

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();

    armnn::ResizeBilinearDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
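
// The only Softmax parameter carried in the schema is beta, which scales the logits before
// normalisation: softmax(x)_i = exp(beta * x_i) / sum_j exp(beta * x_j), with beta = 1.0 giving
// the standard softmax. This is background on the descriptor field rather than anything the
// deserializer enforces.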

void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
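
// Rough intuition for the SpaceToBatchNd descriptor rebuilt above (an illustration, not values
// from a real model): with m_BlockShape = { 2, 2 } and an all-zero m_PadList, an NHWC input of
// shape [1, 4, 4, 1] is rearranged into [4, 2, 2, 1], i.e. 2x2 spatial blocks are moved into
// the batch dimension. The pad list, decoded the same way as in ParsePad, pads the spatial
// dimensions beforehand so they divide evenly by the block shape.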

armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();

    auto flatBufferBegin = flatBufferDescriptor->begin();
    auto flatBufferEnd = flatBufferDescriptor->end();
    auto flatBufferStride = flatBufferDescriptor->stride();

    if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
          flatBufferBegin->Length() == flatBufferStride->Length()))
    {
        throw ParseException(boost::str(
            boost::format("The sizes of the begin, end, and stride lists must be equal %1%")
            % CHECK_LOCATION().AsString()));
    }

    std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
    std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
    std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());

    armnn::StridedSliceDescriptor descriptor(begin, end, stride);
    descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
    descriptor.m_EndMask = flatBufferDescriptor->endMask();
    descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
    descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
    descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
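
// Background on the StridedSlice fields above (the semantics follow the usual TensorFlow-style
// strided slice; this comment is explanatory only): begin/end/stride give, per dimension, the
// start index, the stop index and the step, so begin = { 0 }, end = { 4 }, stride = { 2 } would
// select indices 0 and 2. The masks are bitfields indexed by dimension: a set bit in
// m_BeginMask or m_EndMask means the corresponding begin/end value is ignored and the full
// range is used, while a set bit in m_ShrinkAxisMask removes that dimension from the output.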

void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
    auto flatBufferAxis = flatBufferDescriptor->axis();
    auto flatBufferKeepDims = flatBufferDescriptor->keepDims();

    armnn::MeanDescriptor descriptor;
    descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
    descriptor.m_KeepDims = flatBufferKeepDims;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
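
// The Mean descriptor rebuilt above reduces over the dimensions listed in m_Axis. As an
// invented example: for an input of shape [1, 3, 224, 224], m_Axis = { 2, 3 } with
// m_KeepDims = true gives an output of shape [1, 3, 1, 1], while m_KeepDims = false drops the
// reduced dimensions and gives [1, 3]. Both values are copied straight from the flatbuffer,
// and the resulting shape is taken from the output TensorInfo stored in the model.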

void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // numViews and numDimensions should correspond to the serialized data:
    // numViews == flatBufferViewSizes->size();
    // for each view: numDimensions == flatBufferViewSizes[x]->size();
    // (these consistency checks are not currently performed here).

    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // The splitter has one output slot per view.
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
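
// A small illustration of the ViewsDescriptor built above (values invented): splitting a
// [1, 4, 8, 8] tensor into two equal halves along the channel dimension gives numViews = 2,
// numDimensions = 4, a view size of { 1, 2, 8, 8 } for both views, and origins { 0, 0, 0, 0 }
// and { 0, 2, 0, 0 }. Each view then feeds the matching output slot set in the loop above.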

armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
{
    armnn::LstmDescriptor desc;

    desc.m_ActivationFunc = lstmDescriptor->activationFunc();
    desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
    desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
    desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
    desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
    desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();

    return desc;
}
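
// The three feature flags copied above determine which optional weights ParseLstm reads below:
// when m_CifgEnabled is false the input-gate parameters (inputToInputWeights,
// recurrentToInputWeights, cellToInputWeights and inputGateBias) are expected in the
// flatbuffer; m_PeepholeEnabled pulls in cellToForgetWeights and cellToOutputWeights; and
// m_ProjectionEnabled pulls in projectionWeights and projectionBias.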

void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer