blob: 92212b654a24e8a334930efd9151dd3f256572a3 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
Kevin May43a799c2019-02-08 16:31:42 +0000147#define CHECK_TENSOR_PTR(TENSOR_PTR) \
148 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
149
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000150#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
151 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
152
Mike Kellya0766c32019-02-19 17:22:07 +0000153#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
154 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
155
Kevin May43a799c2019-02-08 16:31:42 +0000156#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
157 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
158
159#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
160 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Constructs the deserializer with an empty network and a dispatch table
// mapping each serialized Layer_* enum value to its parse handler. Slots
// not registered below fall back to ParseUnsupportedLayer, which throws.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // Deprecated MergerLayer deserializes through the Concat handler.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}
241
// Resolves the flatbuffers union stored at layerIndex to the common
// LayerBase record every serialized layer carries. Each case downcasts to
// the concrete layer table and returns its base() field. Input/Output
// layers nest a bindable base, hence the extra ->base() hop there.
// Throws ParseException for Layer_NONE or any unrecognized union tag.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Input wraps a BindableLayerBase; unwrap to the plain LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Output wraps a BindableLayerBase; unwrap to the plain LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer type %1% not recognized") %
                  layerType));
    }
}
363
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000364std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
365{
366 auto layer = GetBaseLayer(graph, index);
367 assert(layer);
368 return layer->layerName()->str();
369}
370
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000371int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000372{
373 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
374
375 if (layerType == Layer::Layer_InputLayer)
376 {
377 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
378 }
379 else if ( layerType == Layer::Layer_OutputLayer )
380 {
381 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
382 }
383 return 0;
384}
385
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000386armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000387{
388 switch (dataLayout)
389 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000390 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000391 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000392 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000393 default:
394 return armnn::DataLayout::NCHW;
395 }
396}
397
Mike Kellyaf484012019-02-20 16:53:11 +0000398armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
399{
400 switch (function)
401 {
402 case armnnSerializer::ActivationFunction_Sigmoid:
403 return armnn::ActivationFunction::Sigmoid;
404 case armnnSerializer::ActivationFunction_TanH:
405 return armnn::ActivationFunction::TanH;
406 case armnnSerializer::ActivationFunction_Linear:
407 return armnn::ActivationFunction::Linear;
408 case armnnSerializer::ActivationFunction_ReLu:
409 return armnn::ActivationFunction::ReLu;
410 case armnnSerializer::ActivationFunction_BoundedReLu:
411 return armnn::ActivationFunction::BoundedReLu;
412 case armnnSerializer::ActivationFunction_LeakyReLu:
413 return armnn::ActivationFunction::LeakyReLu;
414 case armnnSerializer::ActivationFunction_Abs:
415 return armnn::ActivationFunction::Abs;
416 case armnnSerializer::ActivationFunction_Sqrt:
417 return armnn::ActivationFunction::Sqrt;
418 case armnnSerializer::ActivationFunction_Square:
419 return armnn::ActivationFunction::Square;
420 default:
421 return armnn::ActivationFunction::Sigmoid;
422 }
423}
424
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100425armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
426{
427 switch (function)
428 {
429 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
430 return armnn::ArgMinMaxFunction::Max;
431 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
432 default:
433 return armnn::ArgMinMaxFunction::Min;
434 }
435}
436
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100437armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
438{
439 switch (operation)
440 {
441 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
442 return armnn::ComparisonOperation::Equal;
443 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
444 return armnn::ComparisonOperation::Greater;
445 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
446 return armnn::ComparisonOperation::GreaterOrEqual;
447 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
448 return armnn::ComparisonOperation::Less;
449 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
450 return armnn::ComparisonOperation::LessOrEqual;
451 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
452 default:
453 return armnn::ComparisonOperation::NotEqual;
454 }
455}
456
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100457armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
458{
459 switch (method)
460 {
461 case armnnSerializer::ResizeMethod_NearestNeighbor:
462 return armnn::ResizeMethod::NearestNeighbor;
463 case armnnSerializer::ResizeMethod_Bilinear:
464 return armnn::ResizeMethod::NearestNeighbor;
465 default:
466 return armnn::ResizeMethod::NearestNeighbor;
467 }
468}
469
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000470armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000471{
472 armnn::DataType type;
473 CHECK_TENSOR_PTR(tensorPtr);
474
475 switch (tensorPtr->dataType())
476 {
477 case DataType_QuantisedAsymm8:
478 type = armnn::DataType::QuantisedAsymm8;
479 break;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000480 case DataType_QuantisedSymm16:
481 type = armnn::DataType::QuantisedSymm16;
482 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000483 case DataType_Signed32:
484 type = armnn::DataType::Signed32;
485 break;
Kevin May43a799c2019-02-08 16:31:42 +0000486 case DataType_Float32:
487 type = armnn::DataType::Float32;
488 break;
489 case DataType_Float16:
490 type = armnn::DataType::Float16;
491 break;
492 case DataType_Boolean:
493 type = armnn::DataType::Boolean;
494 break;
495 default:
496 {
497 CheckLocation location = CHECK_LOCATION();
498 throw ParseException(
499 boost::str(
500 boost::format("Unsupported data type %1% = %2%. %3%") %
501 tensorPtr->dataType() %
502 EnumNameDataType(tensorPtr->dataType()) %
503 location.AsString()));
504 }
505 }
506 float quantizationScale = tensorPtr->quantizationScale();
507 int32_t quantizationOffset = tensorPtr->quantizationOffset();
508
509 auto dimensions = tensorPtr->dimensions();
510 unsigned int size = dimensions->size();
511 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
512
513 // two statements (on purpose) for easier debugging:
514 armnn::TensorInfo result(size,
515 outputDims.data(),
516 type,
517 quantizationScale,
518 quantizationOffset);
519 return result;
520}
521
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000522armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000523{
524 CHECK_CONST_TENSOR_PTR(constTensorPtr);
525 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
526
527 switch (constTensorPtr->data_type())
528 {
529 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000530 {
531 auto byteData = constTensorPtr->data_as_ByteData()->data();
532 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
533 return armnn::ConstTensor(tensorInfo, byteData->data());
534 }
Mike Kellya0766c32019-02-19 17:22:07 +0000535 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000536 {
537 auto shortData = constTensorPtr->data_as_ShortData()->data();
538 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
539 return armnn::ConstTensor(tensorInfo, shortData->data());
540 }
Mike Kellya0766c32019-02-19 17:22:07 +0000541 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000542 {
543 auto intData = constTensorPtr->data_as_IntData()->data();
544 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
545 return armnn::ConstTensor(tensorInfo, intData->data());
546 }
Mike Kellya0766c32019-02-19 17:22:07 +0000547 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000548 {
549 auto longData = constTensorPtr->data_as_LongData()->data();
550 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
551 return armnn::ConstTensor(tensorInfo, longData->data());
552 }
Mike Kellya0766c32019-02-19 17:22:07 +0000553 default:
554 {
555 CheckLocation location = CHECK_LOCATION();
556 throw ParseException(
557 boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
558 constTensorPtr->data_type() %
559 EnumNameConstTensorData(constTensorPtr->data_type()) %
560 location.AsString()));
561 }
562 }
563}
564
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000565Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000566 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000567{
568 CHECK_LAYERS(graphPtr, 0, layerIndex);
569 auto layer = GetBaseLayer(graphPtr, layerIndex);
570 const auto& numInputs = layer->inputSlots()->size();
571
572 TensorRawPtrVector result(numInputs);
573
574 for (unsigned int i=0; i<numInputs; ++i)
575 {
576 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
577 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
578 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
579 }
580 return result;
581}
582
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000583Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000584 unsigned int layerIndex)
585{
586 CHECK_LAYERS(graphPtr, 0, layerIndex);
587 auto layer = GetBaseLayer(graphPtr, layerIndex);
588 const auto& numOutputs = layer->outputSlots()->size();
589
590 TensorRawPtrVector result(numOutputs);
591
592 for (unsigned int i=0; i<numOutputs; ++i)
593 {
594 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
595 }
596 return result;
597}
598
Derek Lamberti8ddae332019-02-21 16:29:43 +0000599void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000600{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000601 CHECK_LAYERS(graph, 0, layerIndex);
602 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000603 throw ParseException(
604 boost::str(
605 boost::format("Layer not supported. "
606 "layerIndex: %1% "
607 "layerName: %2% / %3%") %
608 layerIndex %
609 layerName %
610 CHECK_LOCATION().AsString()));
611}
612
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000613void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000614{
615 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000616 m_InputBindings.clear();
617 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000618}
619
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000620IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000621{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000622 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000623}
624
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000625IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000626{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000627 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000628}
629
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000630void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000631{
632 delete parser;
633}
634
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000635INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000636{
637 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000638 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
639 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000640}
641
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000642armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000643{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000644 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000645 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
646 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
647 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000648}
649
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000650Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000651{
652 if (binaryContent == nullptr)
653 {
654 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
655 CHECK_LOCATION().AsString()));
656 }
657 flatbuffers::Verifier verifier(binaryContent, len);
658 if (verifier.VerifyBuffer<SerializedGraph>() == false)
659 {
660 throw ParseException(
661 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
662 "flatbuffers format. size:%1% %2%") %
663 len %
664 CHECK_LOCATION().AsString()));
665 }
666 return GetSerializedGraph(binaryContent);
667}
668
// Builds an armnn::INetwork from a verified SerializedGraph in two phases:
//  1) create every non-Input/Output layer via the registered per-type parser,
//     then create the Input/Output layers (SetupInputLayers/SetupOutputLayers);
//  2) wire the connections recorded in m_GraphConnections by the Register*
//     helpers during phase 1.
// Returns ownership of the completed network; m_Network is left moved-from.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are handled separately below so that binding
        // information can be captured alongside layer creation.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may legitimately have no consumers (dangling output).
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}
709
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000710BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000711 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000712{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000713 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000714 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000715 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000716 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000717 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000718 }
719 }
720 throw ParseException(
721 boost::str(
722 boost::format("No input binding found for layer:%1% / %2%") %
723 name %
724 CHECK_LOCATION().AsString()));
725}
726
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000727BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000728 const std::string& name) const
729{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000730 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000731 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000732 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000733 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000734 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000735 }
736 }
737 throw ParseException(
738 boost::str(
739 boost::format("No output binding found for layer:%1% / %2%") %
740 name %
741 CHECK_LOCATION().AsString()));
742}
743
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100744unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
745{
746 for (unsigned int i = 0; i < graph->layers()->size(); i++)
747 {
748 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
749 if (layer->index() == targetIndex)
750 {
751 return i;
752 }
753 }
754 throw ParseException("Layer with given index not found");
755}
756
// Creates an armnn Input layer for every id in graph->inputIds(), propagates
// the serialized TensorInfo onto its output slot, registers the slot for the
// connection pass, and records a name -> (bindingId, TensorInfo) entry in
// m_InputBindings for later GetNetworkInputBindingInfo lookups.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        // inputIds holds the layers' "index" property; translate to vector position.
        const unsigned int inputId = graph->inputIds()->Get(i);
        const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
785
Derek Lamberti8ddae332019-02-21 16:29:43 +0000786void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000787{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000788 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100789 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000790 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100791 m_OutputBindings.reserve(numOutputs);
792
793 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000794 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100795 const unsigned int outputId = graph->outputIds()->Get(i);
796 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
797 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000798
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100799 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
800 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
801 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000802
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100803 IConnectableLayer* outputLayer =
804 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000805
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100806 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
807
808 unsigned int sourceLayerIndex =
809 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
810 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
811 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
812
Derek Lamberti8ddae332019-02-21 16:29:43 +0000813 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100814 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000815 }
816}
817
// Records every output slot of 'layer' (the ArmNN layer built for the
// flatbuffer layer at 'layerIndex') in the connection table keyed by the
// *serialized* layer index, so the wiring pass in CreateNetworkFromGraph can
// connect producers to consumers. Throws ParseException when the serialized
// slot count disagrees with the constructed layer.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        // Use the slot's serialized "index" property, not its vector position.
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
844
// Records every input slot of 'layer' in the connection table, keyed by the
// *producing* layer's serialized index and output slot index, so the wiring
// pass can later connect each producer output to this consumer input.
// Throws ParseException when the serialized slot count disagrees with the
// constructed layer.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        // The slot's serialized "index" property selects the ArmNN input slot.
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}
871
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000872void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
873 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100874 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000875{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100876 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000877 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100878 m_GraphConnections[sourceLayerIndex] = Connections();
879 }
880
881 Connections& connections = m_GraphConnections[sourceLayerIndex];
882 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
883 {
884 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000885 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000886 else
887 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100888 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000889 }
890}
Kevin May43a799c2019-02-08 16:31:42 +0000891
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000892void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100893 uint32_t outputSlotIndex,
894 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000895{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100896 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
897 {
898 m_GraphConnections[sourceLayerIndex] = Connections();
899 }
900
901 Connections& connections = m_GraphConnections[sourceLayerIndex];
902 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
903 {
904 throw ParseException("Same output slot index processed twice");
905 }
906
907 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000908}
909
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100910void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
911{
912 CHECK_LAYERS(graph, 0, layerIndex);
913 auto inputs = GetInputs(graph, layerIndex);
914 CHECK_LOCATION();
915 CHECK_VALID_SIZE(inputs.size(), 1);
916
917 auto outputs = GetOutputs(graph, layerIndex);
918 CHECK_VALID_SIZE(outputs.size(), 1);
919
920 auto layerName = GetLayerName(graph, layerIndex);
921
922 IConnectableLayer* layer = m_Network->AddAbsLayer(layerName.c_str());
923 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
924 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
925
926 RegisterInputSlots(graph, layerIndex, layer);
927 RegisterOutputSlots(graph, layerIndex, layer);
928}
929
Derek Lamberti8ddae332019-02-21 16:29:43 +0000930void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000931{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000932 CHECK_LAYERS(graph, 0, layerIndex);
933 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000934 CHECK_LOCATION();
935 CHECK_VALID_SIZE(inputs.size(), 1);
936
Derek Lamberti8ddae332019-02-21 16:29:43 +0000937 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000938 CHECK_VALID_SIZE(outputs.size(), 1);
939
Derek Lamberti8ddae332019-02-21 16:29:43 +0000940 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000941 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000942 auto serializerDescriptor = serializerLayer->descriptor();
943
944 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +0900945 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +0000946 descriptor.m_A = serializerDescriptor->a();
947 descriptor.m_B = serializerDescriptor->b();
948
949 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
950 layerName.c_str());
951 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
952 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
953
Derek Lamberti8ddae332019-02-21 16:29:43 +0000954 RegisterInputSlots(graph, layerIndex, layer);
955 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000956}
957
Derek Lamberti8ddae332019-02-21 16:29:43 +0000958void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000959{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000960 CHECK_LAYERS(graph, 0, layerIndex);
961 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000962 CHECK_LOCATION();
963 CHECK_VALID_SIZE(inputs.size(), 2);
964
Derek Lamberti8ddae332019-02-21 16:29:43 +0000965 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000966 CHECK_VALID_SIZE(outputs.size(), 1);
967
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000968 auto layerName = GetLayerName(graph, layerIndex);
969 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000970
971 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
972 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
973
Derek Lamberti8ddae332019-02-21 16:29:43 +0000974 RegisterInputSlots(graph, layerIndex, layer);
975 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000976}
977
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100978void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
979{
980 CHECK_LAYERS(graph, 0, layerIndex);
981 auto inputs = GetInputs(graph, layerIndex);
982 CHECK_LOCATION();
983 CHECK_VALID_SIZE(inputs.size(), 1);
984
985 auto outputs = GetOutputs(graph, layerIndex);
986 CHECK_VALID_SIZE(outputs.size(), 1);
987
988 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
989 auto serializerDescriptor = serializerLayer->descriptor();
990
991 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +0900992 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100993 descriptor.m_Axis = serializerDescriptor->axis();
994 auto layerName = GetLayerName(graph, layerIndex);
995 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
996
997 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
998 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
999
1000 RegisterInputSlots(graph, layerIndex, layer);
1001 RegisterOutputSlots(graph, layerIndex, layer);
1002}
1003
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001004void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1005{
1006 CHECK_LAYERS(graph, 0, layerIndex);
1007
1008 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1009 CHECK_VALID_SIZE(inputs.size(), 1);
1010
1011 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1012 CHECK_VALID_SIZE(outputs.size(), 1);
1013
1014 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1015 auto flatBufferCrops = flatBufferDescriptor->crops();
1016 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1017
1018 if (flatBufferCrops->Length() % 2 != 0)
1019 {
1020 throw ParseException(boost::str(
1021 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1022 }
1023
1024 std::vector<std::pair<unsigned int, unsigned int>> crops;
1025 crops.reserve(flatBufferCrops->Length() / 2);
1026 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1027 {
1028 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1029 }
1030
1031 armnn::BatchToSpaceNdDescriptor descriptor;
1032 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1033 descriptor.m_BlockShape =
1034 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1035 descriptor.m_Crops = crops;
1036
1037 auto layerName = GetLayerName(graph, layerIndex);
1038 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1039
1040 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1041 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1042
1043 RegisterInputSlots(graph, layerIndex, layer);
1044 RegisterOutputSlots(graph, layerIndex, layer);
1045}
1046
// Rebuilds a serialized BatchNormalization layer: descriptor (epsilon, data
// layout) plus the four constant statistic tensors (mean, variance, beta,
// gamma) are all read from the flatbuffer layer.
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // The four per-channel statistic tensors are stored as constants in the
    // serialized layer; ToConstTensor copies them out of the flatbuffer.
    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1083
Conor Kennedy76277882019-02-26 08:29:54 +00001084void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1085{
1086 CHECK_LAYERS(graph, 0, layerIndex);
1087 CHECK_LOCATION();
1088
1089 auto outputs = GetOutputs(graph, layerIndex);
1090 CHECK_VALID_SIZE(outputs.size(), 1);
1091
1092 auto layerName = GetLayerName(graph, layerIndex);
1093
1094 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1095 auto serializerInput = serializerLayer->input();
1096
1097 armnn::ConstTensor input = ToConstTensor(serializerInput);
1098
1099 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1100
1101 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1102 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1103
1104 RegisterOutputSlots(graph, layerIndex, layer);
1105}
1106
Derek Lamberti8ddae332019-02-21 16:29:43 +00001107void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001108{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001109 CHECK_LAYERS(graph, 0, layerIndex);
1110 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001111 CHECK_LOCATION();
1112 CHECK_VALID_SIZE(inputs.size(), 1);
1113
Derek Lamberti8ddae332019-02-21 16:29:43 +00001114 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001115 CHECK_VALID_SIZE(outputs.size(), 1);
1116
Derek Lamberti8ddae332019-02-21 16:29:43 +00001117 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001118 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001119 auto serializerDescriptor = serializerLayer->descriptor();
1120
1121 armnn::Convolution2dDescriptor descriptor;
1122 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1123 descriptor.m_PadRight = serializerDescriptor->padRight();
1124 descriptor.m_PadTop = serializerDescriptor->padTop();
1125 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1126 descriptor.m_StrideX = serializerDescriptor->strideX();
1127 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001128 descriptor.m_DilationX = serializerDescriptor->dilationX();
1129 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001130 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1131 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1132
1133 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1134 armnn::ConstTensor biases;
1135
Matteo Martincighfc598e12019-05-14 10:36:13 +01001136 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001137 if (descriptor.m_BiasEnabled)
1138 {
1139 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001140 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001141 }
1142 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1143 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001144 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001145 layerName.c_str());
1146 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1147 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1148
Derek Lamberti8ddae332019-02-21 16:29:43 +00001149 RegisterInputSlots(graph, layerIndex, layer);
1150 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001151}
1152
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001153void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1154{
1155 CHECK_LAYERS(graph, 0, layerIndex);
1156
1157 auto inputs = GetInputs(graph, layerIndex);
1158 CHECK_VALID_SIZE(inputs.size(), 1);
1159
1160 auto outputs = GetOutputs(graph, layerIndex);
1161 CHECK_VALID_SIZE(outputs.size(), 1);
1162
1163 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1164
1165 armnn::DepthToSpaceDescriptor descriptor;
1166 descriptor.m_BlockSize = fbDescriptor->blockSize();
1167 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1168
1169 auto layerName = GetLayerName(graph, layerIndex);
1170 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1171
1172 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1173 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1174
1175 RegisterInputSlots(graph, layerIndex, layer);
1176 RegisterOutputSlots(graph, layerIndex, layer);
1177}
1178
// Rebuilds a serialized DepthwiseConvolution2d layer. Mirrors the
// Convolution2d path: descriptor fields, constant weights, and an optional
// bias tensor that is only deserialized when biasEnabled is set.
void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    // Bias is optional: left as EmptyOptional unless the descriptor enables it.
    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         optionalBiases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1225
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001226void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1227{
1228 CHECK_LAYERS(graph, 0, layerIndex);
1229 auto inputs = GetInputs(graph, layerIndex);
1230 CHECK_LOCATION();
1231 CHECK_VALID_SIZE(inputs.size(), 2);
1232
1233 auto outputs = GetOutputs(graph, layerIndex);
1234 CHECK_VALID_SIZE(outputs.size(), 4);
1235
1236 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1237 auto layerName = GetLayerName(graph, layerIndex);
1238 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1239
1240 armnn::DetectionPostProcessDescriptor descriptor;
1241 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1242 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1243 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1244 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1245 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1246 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1247 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1248 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1249 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1250 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1251 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1252
1253 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1254
1255 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1256 anchors,
1257 layerName.c_str());
1258
1259 for (unsigned int i = 0; i < 4; i++)
1260 {
1261 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1262 }
1263
1264 RegisterInputSlots(graph, layerIndex, layer);
1265 RegisterOutputSlots(graph, layerIndex, layer);
1266}
1267
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001268void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1269{
1270 CHECK_LAYERS(graph, 0, layerIndex);
1271 auto inputs = GetInputs(graph, layerIndex);
1272 CHECK_LOCATION();
1273 CHECK_VALID_SIZE(inputs.size(), 2);
1274
1275 auto outputs = GetOutputs(graph, layerIndex);
1276 CHECK_VALID_SIZE(outputs.size(), 1);
1277
1278 auto layerName = GetLayerName(graph, layerIndex);
1279 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1280
1281 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1282 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1283
1284 RegisterInputSlots(graph, layerIndex, layer);
1285 RegisterOutputSlots(graph, layerIndex, layer);
1286}
1287
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001288void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1289{
1290 CHECK_LAYERS(graph, 0, layerIndex);
1291 auto inputs = GetInputs(graph, layerIndex);
1292 CHECK_LOCATION();
1293 CHECK_VALID_SIZE(inputs.size(), 2);
1294
1295 auto outputs = GetOutputs(graph, layerIndex);
1296 CHECK_VALID_SIZE(outputs.size(), 1);
1297
1298 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001299 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1300 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001301
1302 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1303 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1304
1305 RegisterInputSlots(graph, layerIndex, layer);
1306 RegisterOutputSlots(graph, layerIndex, layer);
1307}
1308
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001309void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1310{
1311 CHECK_LAYERS(graph, 0, layerIndex);
1312 auto inputs = GetInputs(graph, layerIndex);
1313 CHECK_LOCATION();
1314 CHECK_VALID_SIZE(inputs.size(), 2);
1315
1316 auto outputs = GetOutputs(graph, layerIndex);
1317 CHECK_VALID_SIZE(outputs.size(), 1);
1318
1319 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001320 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1321 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001322
1323 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1324 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1325
1326 RegisterInputSlots(graph, layerIndex, layer);
1327 RegisterOutputSlots(graph, layerIndex, layer);
1328}
1329
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001330void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1331{
1332 CHECK_LAYERS(graph, 0, layerIndex);
1333
1334 auto inputs = GetInputs(graph, layerIndex);
1335 CHECK_VALID_SIZE(inputs.size(), 1);
1336
1337 auto outputs = GetOutputs(graph, layerIndex);
1338 CHECK_VALID_SIZE(outputs.size(), 1);
1339
1340 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1341 auto fbDescriptor = fbLayer->descriptor();
1342
1343 armnn::InstanceNormalizationDescriptor descriptor;
1344 descriptor.m_Gamma = fbDescriptor->gamma();
1345 descriptor.m_Beta = fbDescriptor->beta();
1346 descriptor.m_Eps = fbDescriptor->eps();
1347 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1348
1349 const std::string layerName = GetLayerName(graph, layerIndex);
1350 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1351
1352 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1353 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1354
1355 RegisterInputSlots(graph, layerIndex, layer);
1356 RegisterOutputSlots(graph, layerIndex, layer);
1357}
1358
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001359void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1360{
1361 CHECK_LAYERS(graph, 0, layerIndex);
1362
1363 auto inputs = GetInputs(graph, layerIndex);
1364 CHECK_VALID_SIZE(inputs.size(), 1);
1365
1366 auto outputs = GetOutputs(graph, layerIndex);
1367 CHECK_VALID_SIZE(outputs.size(), 1);
1368 auto outputInfo = ToTensorInfo(outputs[0]);
1369
1370 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1371 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1372
1373 auto layerName = GetLayerName(graph, layerIndex);
1374 armnn::L2NormalizationDescriptor descriptor;
1375 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001376 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001377
1378 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1379 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1380
1381 RegisterInputSlots(graph, layerIndex, layer);
1382 RegisterOutputSlots(graph, layerIndex, layer);
1383}
1384
Sadik Armagan26257852019-10-14 13:00:47 +01001385void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1386{
1387 CHECK_LAYERS(graph, 0, layerIndex);
1388
1389 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1390 CHECK_VALID_SIZE(inputs.size(), 1);
1391
1392 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1393 CHECK_VALID_SIZE(outputs.size(), 1);
1394
1395 armnn::LogSoftmaxDescriptor descriptor;
1396 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1397 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1398 auto layerName = GetLayerName(graph, layerIndex);
1399
1400 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1401
1402 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1403 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1404
1405 RegisterInputSlots(graph, layerIndex, layer);
1406 RegisterOutputSlots(graph, layerIndex, layer);
1407}
1408
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001409void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1410{
1411 CHECK_LAYERS(graph, 0, layerIndex);
1412 auto inputs = GetInputs(graph, layerIndex);
1413 CHECK_LOCATION();
1414 CHECK_VALID_SIZE(inputs.size(), 2);
1415
1416 auto outputs = GetOutputs(graph, layerIndex);
1417 CHECK_VALID_SIZE(outputs.size(), 1);
1418
1419 auto layerName = GetLayerName(graph, layerIndex);
1420 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1421
1422 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1423 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1424
1425 RegisterInputSlots(graph, layerIndex, layer);
1426 RegisterOutputSlots(graph, layerIndex, layer);
1427}
1428
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001429void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1430{
1431 CHECK_LAYERS(graph, 0, layerIndex);
1432 auto inputs = GetInputs(graph, layerIndex);
1433 CHECK_LOCATION();
1434 CHECK_VALID_SIZE(inputs.size(), 2);
1435
1436 auto outputs = GetOutputs(graph, layerIndex);
1437 CHECK_VALID_SIZE(outputs.size(), 1);
1438
1439 auto layerName = GetLayerName(graph, layerIndex);
1440 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1441
1442 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1443 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1444
1445 RegisterInputSlots(graph, layerIndex, layer);
1446 RegisterOutputSlots(graph, layerIndex, layer);
1447}
1448
Jim Flynne242f2d2019-05-22 14:24:13 +01001449const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1450 unsigned int layerIndex)
1451{
1452 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1453
1454 switch (layerType)
1455 {
1456 case Layer::Layer_ConcatLayer:
1457 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1458 case Layer::Layer_MergerLayer:
1459 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1460 default:
1461 throw armnn::Exception("unknown layer type, should be concat or merger");
1462 }
1463}
1464
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001465void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1466{
1467 CHECK_LAYERS(graph, 0, layerIndex);
1468 CHECK_LOCATION();
1469
1470 auto inputs = GetInputs(graph, layerIndex);
1471 CHECK_VALID_SIZE(inputs.size(), 2);
1472
1473 auto outputs = GetOutputs(graph, layerIndex);
1474 CHECK_VALID_SIZE(outputs.size(), 1);
1475
1476 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1477 auto fbDescriptor = fbLayer->descriptor();
1478
1479 armnn::ComparisonDescriptor descriptor;
1480 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1481
1482 const std::string& layerName = GetLayerName(graph, layerIndex);
1483 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1484
1485 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1486 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1487
1488 RegisterInputSlots(graph, layerIndex, layer);
1489 RegisterOutputSlots(graph, layerIndex, layer);
1490}
1491
Jim Flynn906f9462019-05-10 13:55:21 +01001492void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001493{
1494 CHECK_LAYERS(graph, 0, layerIndex);
1495 CHECK_LOCATION();
1496
1497 auto outputs = GetOutputs(graph, layerIndex);
1498 CHECK_VALID_SIZE(outputs.size(), 1);
1499
Jim Flynnac25a1b2019-02-28 10:40:49 +00001500 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001501 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1502 unsigned int numViews = originsDescriptor->numViews();
1503 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001504
1505 // can now check the number of inputs == number of views
1506 auto inputs = GetInputs(graph, layerIndex);
1507 CHECK_VALID_SIZE(inputs.size(), numViews);
1508
1509 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001510 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001511 for (unsigned int v = 0; v < numViews; ++v)
1512 {
1513 auto originPtr = originsPtr->Get(v);
1514 for (unsigned int d = 0; d < numDimensions; ++d)
1515 {
1516 uint32_t value = originPtr->data()->Get(d);
1517 descriptor.SetViewOriginCoord(v, d, value);
1518 }
1519 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001520 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001521
Jim Flynn906f9462019-05-10 13:55:21 +01001522 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001523 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1524 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1525
1526 RegisterInputSlots(graph, layerIndex, layer);
1527 RegisterOutputSlots(graph, layerIndex, layer);
1528}
1529
Derek Lamberti8ddae332019-02-21 16:29:43 +00001530void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001531{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001532 CHECK_LAYERS(graph, 0, layerIndex);
1533 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001534 CHECK_LOCATION();
1535 CHECK_VALID_SIZE(inputs.size(), 2);
1536
Derek Lamberti8ddae332019-02-21 16:29:43 +00001537 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001538 CHECK_VALID_SIZE(outputs.size(), 1);
1539
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001540 auto layerName = GetLayerName(graph, layerIndex);
1541 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001542
1543 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1544 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1545
Derek Lamberti8ddae332019-02-21 16:29:43 +00001546 RegisterInputSlots(graph, layerIndex, layer);
1547 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001548}
1549
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001550void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1551{
1552 CHECK_LAYERS(graph, 0, layerIndex);
1553 CHECK_LOCATION();
1554
1555 auto inputs = GetInputs(graph, layerIndex);
1556 CHECK_VALID_SIZE(inputs.size(), 1);
1557
1558 auto outputs = GetOutputs(graph, layerIndex);
1559 CHECK_VALID_SIZE(outputs.size(), 1);
1560
1561 auto layerName = GetLayerName(graph, layerIndex);
1562
1563 armnn::IConnectableLayer* layer;
1564
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001565 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001566
1567 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1568 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1569
1570 RegisterInputSlots(graph, layerIndex, layer);
1571 RegisterOutputSlots(graph, layerIndex, layer);
1572}
1573
Derek Lamberti8ddae332019-02-21 16:29:43 +00001574void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001575{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001576 CHECK_LAYERS(graph, 0, layerIndex);
1577 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001578 CHECK_LOCATION();
1579 CHECK_VALID_SIZE(inputs.size(), 1);
1580
Derek Lamberti8ddae332019-02-21 16:29:43 +00001581 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001582 CHECK_VALID_SIZE(outputs.size(), 1);
1583
Derek Lamberti8ddae332019-02-21 16:29:43 +00001584 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001585 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001586 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1587
1588 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1589 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1590 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1591
1592 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1593
1594 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001595 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001596 if (flatBufferDescriptor->biasEnabled())
1597 {
1598 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001599 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001600 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001601 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1602 weightsTensor,
1603 optionalBiases,
1604 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001605
1606 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1607 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1608
Derek Lamberti8ddae332019-02-21 16:29:43 +00001609 RegisterInputSlots(graph, layerIndex, layer);
1610 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001611}
1612
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001613void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1614{
1615 CHECK_LAYERS(graph, 0, layerIndex);
1616
1617 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1618 CHECK_VALID_SIZE(inputs.size(), 1);
1619
1620 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1621 CHECK_VALID_SIZE(outputs.size(), 1);
1622
1623 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1624 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001625 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001626
1627 if (flatBufferPadList->Length() % 2 != 0)
1628 {
1629 throw ParseException(boost::str(
1630 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1631 }
1632
1633 std::vector<std::pair<unsigned int, unsigned int>> padList;
1634 padList.reserve(flatBufferPadList->Length() / 2);
1635 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1636 {
1637 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1638 }
1639
David Monahan34757812019-06-19 11:47:21 +01001640 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001641
1642 auto layerName = GetLayerName(graph, layerIndex);
1643 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1644
1645 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1646 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1647
1648 RegisterInputSlots(graph, layerIndex, layer);
1649 RegisterOutputSlots(graph, layerIndex, layer);
1650}
1651
Derek Lamberti8ddae332019-02-21 16:29:43 +00001652void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001653{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001654 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001655
1656 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001657 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001658
Derek Lamberti8ddae332019-02-21 16:29:43 +00001659 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001660 CHECK_VALID_SIZE(inputs.size(), 1);
1661
Derek Lamberti8ddae332019-02-21 16:29:43 +00001662 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001663 CHECK_VALID_SIZE(outputs.size(), 1);
1664 auto outputInfo = ToTensorInfo(outputs[0]);
1665
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001666 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001667 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1668
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001669 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001670 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1671
Derek Lamberti8ddae332019-02-21 16:29:43 +00001672 RegisterInputSlots(graph, layerIndex, layer);
1673 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001674}
1675
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001676armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001677 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001678{
1679 armnn::Pooling2dDescriptor desc;
1680
1681 switch (pooling2dDesc->poolType())
1682 {
1683 case PoolingAlgorithm_Average:
1684 {
1685 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001686 break;
1687 }
1688 case PoolingAlgorithm_Max:
1689 {
1690 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001691 break;
1692 }
1693 default:
1694 {
1695 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1696 }
1697 }
1698
1699 switch (pooling2dDesc->outputShapeRounding())
1700 {
1701 case OutputShapeRounding_Floor:
1702 {
1703 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1704 break;
1705 }
1706 case OutputShapeRounding_Ceiling:
1707 {
1708 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1709 break;
1710 }
1711 default:
1712 {
1713 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1714 }
1715 }
1716
1717 switch (pooling2dDesc->paddingMethod())
1718 {
1719 case PaddingMethod_Exclude:
1720 {
1721 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1722 break;
1723 }
1724 case PaddingMethod_IgnoreValue:
1725 {
1726 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1727 break;
1728 }
1729 default:
1730 {
1731 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1732 }
1733 }
1734
1735 switch (pooling2dDesc->dataLayout())
1736 {
1737 case DataLayout_NCHW:
1738 {
1739 desc.m_DataLayout = armnn::DataLayout::NCHW;
1740 break;
1741 }
1742 case DataLayout_NHWC:
1743 {
1744 desc.m_DataLayout = armnn::DataLayout::NHWC;
1745 break;
1746 }
1747 default:
1748 {
1749 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1750 }
1751 }
1752
1753 desc.m_PadRight = pooling2dDesc->padRight();
1754 desc.m_PadLeft = pooling2dDesc->padLeft();
1755 desc.m_PadBottom = pooling2dDesc->padBottom();
1756 desc.m_PadTop = pooling2dDesc->padTop();
1757 desc.m_StrideX = pooling2dDesc->strideX();
1758 desc.m_StrideY = pooling2dDesc->strideY();
1759 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1760 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1761
1762 return desc;
1763}
1764
Derek Lamberti8ddae332019-02-21 16:29:43 +00001765void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001766{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001767 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001768
Derek Lamberti8ddae332019-02-21 16:29:43 +00001769 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001770 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001771 CHECK_VALID_SIZE(inputs.size(), 1);
1772
Derek Lamberti8ddae332019-02-21 16:29:43 +00001773 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001774 CHECK_VALID_SIZE(outputs.size(), 1);
1775 auto outputInfo = ToTensorInfo(outputs[0]);
1776
1777 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001778 auto layerName = GetLayerName(graph, layerIndex);
1779 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001780 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1781
Derek Lamberti8ddae332019-02-21 16:29:43 +00001782 RegisterInputSlots(graph, layerIndex, layer);
1783 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001784}
1785
Derek Lamberti87acb272019-03-27 16:51:31 +00001786void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1787{
1788 CHECK_LAYERS(graph, 0, layerIndex);
1789
1790 auto inputs = GetInputs(graph, layerIndex);
1791 CHECK_VALID_SIZE(inputs.size(), 1);
1792
1793 auto outputs = GetOutputs(graph, layerIndex);
1794 CHECK_VALID_SIZE(outputs.size(), 1);
1795 auto outputInfo = ToTensorInfo(outputs[0]);
1796
1797 auto layerName = GetLayerName(graph, layerIndex);
1798 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1799 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1800
1801 RegisterInputSlots(graph, layerIndex, layer);
1802 RegisterOutputSlots(graph, layerIndex, layer);
1803}
1804
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001805armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001806 const std::vector<uint32_t>& targetDimsIn)
1807{
1808 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1809 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1810
1811 if (stretchDim != targetDimsIn.end())
1812 {
1813 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1814 {
1815 throw ParseException(boost::str(
1816 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1817 }
1818
1819 auto targetNumElements =
1820 boost::numeric_cast<unsigned int>(
1821 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1822
1823 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1824 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1825 }
1826
1827 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1828
1829 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1830 reshapeInfo.SetShape(outputShape);
1831
1832 return reshapeInfo;
1833}
1834
// Deserializes a Reshape layer: resolves the target shape (including a possible
// -1 "stretch" dimension via OutputShapeOfReshape) and adds the layer to m_Network.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 entry in the target shape from the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the 'inputs.size() > 1' condition means this shape-consistency
    // check never runs for the common single-input case, so a mismatched serialized
    // output shape is silently accepted — looks like it may have been intended as
    // 'inputs.size() == 1' or unconditional; confirm before changing, since fixing
    // it would make previously-accepted models throw.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The output slot carries the computed reshape info, not the serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1877
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001878void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1879{
1880 CHECK_LAYERS(graph, 0, layerIndex);
1881
1882 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1883 CHECK_VALID_SIZE(inputs.size(), 1);
1884
1885 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1886 CHECK_VALID_SIZE(outputs.size(), 1);
1887
1888 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1889
1890 armnn::ResizeDescriptor descriptor;
1891 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1892 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1893 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1894 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1895
1896 auto layerName = GetLayerName(graph, layerIndex);
1897 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1898
1899 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1900 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1901
1902 RegisterInputSlots(graph, layerIndex, layer);
1903 RegisterOutputSlots(graph, layerIndex, layer);
1904}
1905
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001906void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1907{
1908 CHECK_LAYERS(graph, 0, layerIndex);
1909
1910 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1911 CHECK_VALID_SIZE(inputs.size(), 1);
1912
1913 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1914 CHECK_VALID_SIZE(outputs.size(), 1);
1915
1916 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1917
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001918 armnn::ResizeDescriptor descriptor;
1919 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001920 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001921 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
1922 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001923
1924 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001925 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001926
1927 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1928 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1929
1930 RegisterInputSlots(graph, layerIndex, layer);
1931 RegisterOutputSlots(graph, layerIndex, layer);
1932}
1933
Derek Lamberti8ddae332019-02-21 16:29:43 +00001934void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001935{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001936 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001937
Derek Lamberti8ddae332019-02-21 16:29:43 +00001938 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001939 CHECK_VALID_SIZE(inputs.size(), 1);
1940
Derek Lamberti8ddae332019-02-21 16:29:43 +00001941 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001942 CHECK_VALID_SIZE(outputs.size(), 1);
1943
1944 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001945 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001946 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001947
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001948 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1949
1950 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1951 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1952
Derek Lamberti8ddae332019-02-21 16:29:43 +00001953 RegisterInputSlots(graph, layerIndex, layer);
1954 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001955}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001956
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001957void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1958{
1959 CHECK_LAYERS(graph, 0, layerIndex);
1960
1961 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1962 CHECK_VALID_SIZE(inputs.size(), 1);
1963
1964 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1965 CHECK_VALID_SIZE(outputs.size(), 1);
1966
1967 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1968 auto flatBufferPadList = flatBufferDescriptor->padList();
1969 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1970
1971 if (flatBufferPadList->Length() % 2 != 0)
1972 {
1973 throw ParseException(boost::str(
1974 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1975 }
1976
1977 std::vector<std::pair<unsigned int, unsigned int>> padList;
1978 padList.reserve(flatBufferPadList->Length() / 2);
1979 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1980 {
1981 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1982 }
1983
1984 armnn::SpaceToBatchNdDescriptor descriptor;
1985 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1986 descriptor.m_BlockShape =
1987 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1988 descriptor.m_PadList = padList;
1989
1990 auto layerName = GetLayerName(graph, layerIndex);
1991 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1992
1993 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1994 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1995
1996 RegisterInputSlots(graph, layerIndex, layer);
1997 RegisterOutputSlots(graph, layerIndex, layer);
1998}
1999
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002000void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2001{
2002 CHECK_LAYERS(graph, 0, layerIndex);
2003
2004 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2005 CHECK_VALID_SIZE(inputs.size(), 1);
2006
2007 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2008 CHECK_VALID_SIZE(outputs.size(), 1);
2009
2010 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2011
2012 armnn::SpaceToDepthDescriptor descriptor;
2013 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2014 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2015
2016 auto layerName = GetLayerName(graph, layerIndex);
2017 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2018
2019 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2020 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2021
2022 RegisterInputSlots(graph, layerIndex, layer);
2023 RegisterOutputSlots(graph, layerIndex, layer);
2024}
2025
Nina Drozd57728782019-02-27 10:53:27 +00002026armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
2027 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
2028 unsigned int layerIndex)
2029{
2030 armnn::NormalizationDescriptor desc;
2031
2032 switch (normalizationDescriptor->normChannelType())
2033 {
2034 case NormalizationAlgorithmChannel_Across:
2035 {
2036 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2037 break;
2038 }
2039 case NormalizationAlgorithmChannel_Within:
2040 {
2041 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2042 break;
2043 }
2044 default:
2045 {
2046 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
2047 }
2048 }
2049
2050 switch (normalizationDescriptor->normMethodType())
2051 {
2052 case NormalizationAlgorithmMethod_LocalBrightness:
2053 {
2054 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2055 break;
2056 }
2057 case NormalizationAlgorithmMethod_LocalContrast:
2058 {
2059 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2060 break;
2061 }
2062 default:
2063 {
2064 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
2065 }
2066 }
2067
2068 switch (normalizationDescriptor->dataLayout())
2069 {
2070 case DataLayout_NCHW:
2071 {
2072 desc.m_DataLayout = armnn::DataLayout::NCHW;
2073 break;
2074 }
2075 case DataLayout_NHWC:
2076 {
2077 desc.m_DataLayout = armnn::DataLayout::NHWC;
2078 break;
2079 }
2080 default:
2081 {
2082 BOOST_ASSERT_MSG(false, "Unsupported data layout");
2083 }
2084 }
2085
2086 desc.m_Alpha = normalizationDescriptor->alpha();
2087 desc.m_Beta = normalizationDescriptor->beta();
2088 desc.m_K = normalizationDescriptor->k();
2089 desc.m_NormSize = normalizationDescriptor->normSize();
2090
2091 return desc;
2092}
2093
2094void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2095{
2096 CHECK_LAYERS(graph, 0, layerIndex);
2097
2098 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2099
2100 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2101 CHECK_VALID_SIZE(inputs.size(), 1);
2102
2103 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2104 CHECK_VALID_SIZE(outputs.size(), 1);
2105
2106 auto outputInfo = ToTensorInfo(outputs[0]);
2107
2108 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2109 auto layerName = GetLayerName(graph, layerIndex);
2110
2111 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2112 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2113
2114 RegisterInputSlots(graph, layerIndex, layer);
2115 RegisterOutputSlots(graph, layerIndex, layer);
2116}
2117
Sadik Armagan8b42a382019-03-01 14:24:49 +00002118void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2119{
2120 CHECK_LAYERS(graph, 0, layerIndex);
2121 auto inputs = GetInputs(graph, layerIndex);
2122 CHECK_LOCATION();
2123 CHECK_VALID_SIZE(inputs.size(), 1);
2124
2125 auto outputs = GetOutputs(graph, layerIndex);
2126 CHECK_VALID_SIZE(outputs.size(), 1);
2127
2128 auto layerName = GetLayerName(graph, layerIndex);
2129 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
2130
2131 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2132 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2133
2134 RegisterInputSlots(graph, layerIndex, layer);
2135 RegisterOutputSlots(graph, layerIndex, layer);
2136}
2137
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002138void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2139{
2140 CHECK_LAYERS(graph, 0, layerIndex);
2141
2142 auto inputs = GetInputs(graph, layerIndex);
2143 CHECK_VALID_SIZE(inputs.size(), 1);
2144
2145 auto outputs = GetOutputs(graph, layerIndex);
2146 CHECK_VALID_SIZE(outputs.size(), 1);
2147
2148 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2149
2150 auto fbBegin = fbDescriptor->begin();
2151 auto fbSize = fbDescriptor->size();
2152
2153 if (fbBegin->Length() != fbSize->Length())
2154 {
2155 throw ParseException(boost::str(
2156 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2157 }
2158
2159 armnn::SliceDescriptor descriptor;
2160 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2161 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2162
2163 auto layerName = GetLayerName(graph, layerIndex);
2164 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2165
2166 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2167 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2168
2169 RegisterInputSlots(graph, layerIndex, layer);
2170 RegisterOutputSlots(graph, layerIndex, layer);
2171}
2172
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002173void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2174{
2175 CHECK_LAYERS(graph, 0, layerIndex);
2176
2177 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2178 CHECK_VALID_SIZE(inputs.size(), 1);
2179
2180 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2181 CHECK_VALID_SIZE(outputs.size(), 1);
2182
2183 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2184
2185 auto flatBufferBegin = flatBufferDescriptor->begin();
2186 auto flatBufferEnd = flatBufferDescriptor->end();
2187 auto flatBufferStride = flatBufferDescriptor->stride();
2188
2189 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2190 flatBufferBegin->Length() == flatBufferStride->Length()))
2191 {
2192 throw ParseException(boost::str(
2193 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2194 }
2195
2196 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2197 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2198 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2199
2200 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2201 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2202 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2203 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2204 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2205 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2206 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2207
2208 auto layerName = GetLayerName(graph, layerIndex);
2209 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2210
2211 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2212 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2213
2214 RegisterInputSlots(graph, layerIndex, layer);
2215 RegisterOutputSlots(graph, layerIndex, layer);
2216}
2217
Conor Kennedyda1f9752019-03-01 14:37:12 +00002218void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2219{
2220 CHECK_LAYERS(graph, 0, layerIndex);
2221 auto inputs = GetInputs(graph, layerIndex);
2222 CHECK_LOCATION();
2223 CHECK_VALID_SIZE(inputs.size(), 2);
2224
2225 auto outputs = GetOutputs(graph, layerIndex);
2226 CHECK_VALID_SIZE(outputs.size(), 1);
2227
2228 auto layerName = GetLayerName(graph, layerIndex);
2229 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2230
2231 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2232 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2233
2234 RegisterInputSlots(graph, layerIndex, layer);
2235 RegisterOutputSlots(graph, layerIndex, layer);
2236}
2237
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002238void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2239{
2240 CHECK_LAYERS(graph, 0, layerIndex);
2241
2242 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2243 CHECK_VALID_SIZE(inputs.size(), 2);
2244
2245 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2246 CHECK_VALID_SIZE(outputs.size(), 1);
2247
2248 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002249 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2250
2251 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002252 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2253
2254 RegisterInputSlots(graph, layerIndex, layer);
2255 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002256}
2257
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002258void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2259{
2260 CHECK_LAYERS(graph, 0, layerIndex);
2261
2262 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2263 CHECK_VALID_SIZE(inputs.size(), 1);
2264
2265 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2266 CHECK_VALID_SIZE(outputs.size(), 1);
2267
2268 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2269 auto flatBufferAxis = flatBufferDescriptor->axis();
2270 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2271
2272 armnn::MeanDescriptor descriptor;
2273 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2274 descriptor.m_KeepDims = flatBufferKeepDims;
2275
2276 auto layerName = GetLayerName(graph, layerIndex);
2277 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2278
2279 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2280 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2281
2282 RegisterInputSlots(graph, layerIndex, layer);
2283 RegisterOutputSlots(graph, layerIndex, layer);
2284}
2285
Jim Flynn18ce3382019-03-08 11:08:30 +00002286void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2287{
2288 CHECK_LAYERS(graph, 0, layerIndex);
2289
2290 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2291 CHECK_VALID_SIZE(inputs.size(), 1);
2292
2293 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2294
2295 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2296 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2297 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2298 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2299 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2300 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2301
2302 // Check numViews and numDimensions corresponds to the ones already serialized ...
2303 // numViews == flatBufferViewSizes.size();
2304 // foreach: numDimensions == flatBufferViewSizes[x].size();
2305
2306 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2307 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2308 {
2309 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2310 {
2311 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2312 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2313 }
2314 }
2315
2316 auto layerName = GetLayerName(graph, layerIndex);
2317 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2318
2319 // I could have as many outputs as views ...
2320 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2321 {
2322 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2323 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2324 }
2325
2326 RegisterInputSlots(graph, layerIndex, layer);
2327 RegisterOutputSlots(graph, layerIndex, layer);
2328}
2329
Jim Flynn11af3752019-03-19 17:22:29 +00002330armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2331{
2332 armnn::LstmDescriptor desc;
2333
2334 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2335 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2336 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2337 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2338 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2339 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002340 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002341
2342 return desc;
2343}
2344
// Deserializes an LstmLayer: rebuilds the LstmDescriptor, gathers the weight
// and bias tensors from the flatbuffer input params, and adds the layer
// (which has 3 inputs and 4 outputs) to the network.
//
// The optional parameter groups (CIFG, projection, peephole, layer norm) are
// only read from the flatbuffer when the corresponding descriptor flag is
// set, mirroring how they were serialized.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - always present. NOTE: lstmInputParams stores
    // raw pointers to these locals, so they must stay alive until the
    // AddLstmLayer() call below.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters: only serialized when CIFG is disabled.
    // These locals are declared outside the 'if' so the pointers stored in
    // lstmInputParams remain valid after the block.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalization parameters; the input-gate weights in this
    // group are additionally gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Restore the tensor info of each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2460
// Deserializes a QuantizedLstmLayer: gathers all twelve weight/bias tensors
// from the flatbuffer input params (all parameters are mandatory for the
// quantized variant - there are no optional groups) and adds the layer
// (3 inputs, 2 outputs) to the network.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // NOTE: lstmInputParams stores raw pointers to these locals, so they must
    // stay alive until the AddQuantizedLstmLayer() call below.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Restore the tensor info of both output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2514
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002515void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2516{
2517 CHECK_LAYERS(graph, 0, layerIndex);
2518
2519 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2520 CHECK_VALID_SIZE(inputs.size(), 1);
2521
2522 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2523 CHECK_VALID_SIZE(outputs.size(), 1);
2524
2525 const std::string layerName = GetLayerName(graph, layerIndex);
2526 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2527
2528 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2529 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2530
2531 RegisterInputSlots(graph, layerIndex, layer);
2532 RegisterOutputSlots(graph, layerIndex, layer);
2533}
2534
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002535void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2536{
2537 CHECK_LAYERS(graph, 0, layerIndex);
2538
2539 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2540 CHECK_VALID_SIZE(inputs.size(), 2);
2541
2542 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2543 CHECK_VALID_SIZE(outputs.size(), 1);
2544
2545 const std::string layerName = GetLayerName(graph, layerIndex);
2546 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2547
2548 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2549 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2550
2551 RegisterInputSlots(graph, layerIndex, layer);
2552 RegisterOutputSlots(graph, layerIndex, layer);
2553}
2554
Sadik Armaganeff363d2019-04-05 15:25:46 +01002555void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2556{
2557 CHECK_LAYERS(graph, 0, layerIndex);
2558 auto inputs = GetInputs(graph, layerIndex);
2559 CHECK_LOCATION();
2560 CHECK_VALID_SIZE(inputs.size(), 2);
2561
2562 auto outputs = GetOutputs(graph, layerIndex);
2563 CHECK_VALID_SIZE(outputs.size(), 2);
2564
2565 auto layerName = GetLayerName(graph, layerIndex);
2566 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2567
2568 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2569 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2570
2571 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2572 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2573
2574 RegisterInputSlots(graph, layerIndex, layer);
2575 RegisterOutputSlots(graph, layerIndex, layer);
2576}
2577
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002578void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2579{
2580 CHECK_LAYERS(graph, 0, layerIndex);
2581 auto inputs = GetInputs(graph, layerIndex);
2582 CHECK_LOCATION();
2583 CHECK_VALID_SIZE(inputs.size(), 2);
2584
2585 auto outputs = GetOutputs(graph, layerIndex);
2586 CHECK_VALID_SIZE(outputs.size(), 1);
2587
2588 auto layerName = GetLayerName(graph, layerIndex);
2589 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2590
2591 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2592 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2593
2594 RegisterInputSlots(graph, layerIndex, layer);
2595 RegisterOutputSlots(graph, layerIndex, layer);
2596}
2597
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002598void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2599{
2600 CHECK_LAYERS(graph, 0, layerIndex);
2601
2602 auto inputs = GetInputs(graph, layerIndex);
2603 CHECK_VALID_SIZE(inputs.size(), 1);
2604
2605 auto outputs = GetOutputs(graph, layerIndex);
2606 CHECK_VALID_SIZE(outputs.size(), 1);
2607
2608 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2609 auto layerName = GetLayerName(graph, layerIndex);
2610 auto serializerDescriptor = serializerLayer->descriptor();
2611
2612 armnn::TransposeConvolution2dDescriptor descriptor;
2613 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2614 descriptor.m_PadRight = serializerDescriptor->padRight();
2615 descriptor.m_PadTop = serializerDescriptor->padTop();
2616 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2617 descriptor.m_StrideX = serializerDescriptor->strideX();
2618 descriptor.m_StrideY = serializerDescriptor->strideY();;
2619 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2620 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2621
2622 // weights & biases
2623 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2624 armnn::Optional<armnn::ConstTensor> optionalBiases;
2625 if (descriptor.m_BiasEnabled)
2626 {
2627 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2628 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2629 }
2630
2631 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2632 weights,
2633 optionalBiases,
2634 layerName.c_str());
2635
2636 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2637 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2638
2639 RegisterInputSlots(graph, layerIndex, layer);
2640 RegisterOutputSlots(graph, layerIndex, layer);
2641}
2642
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002643void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2644{
2645 CHECK_LAYERS(graph, 0, layerIndex);
2646 auto inputs = GetInputs(graph, layerIndex);
2647
2648 auto outputs = GetOutputs(graph, layerIndex);
2649 CHECK_VALID_SIZE(outputs.size(), 1);
2650
2651 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2652 unsigned int axis = flatBufferDescriptor->axis();
2653 unsigned int numInputs = flatBufferDescriptor->numInputs();
2654 CHECK_VALID_SIZE(inputs.size(), numInputs);
2655
2656 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2657 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2658 flatBufferInputShape->begin() + flatBufferInputShape->size());
2659
2660 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2661 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2662
2663 for (unsigned int i=0; i<inputs.size(); ++i)
2664 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01002665 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002666 if (descriptor.m_InputShape != inputShape)
2667 {
2668 std::stringstream ss;
2669 ss << "Shape of input "
2670 << i
2671 << " "
2672 << inputShape
2673 << " does not equal defined input shape "
2674 << descriptor.m_InputShape
2675 << ": "
2676 << CHECK_LOCATION().AsString();
2677 throw ParseException(ss.str());
2678 }
2679 }
2680
2681 auto layerName = GetLayerName(graph, layerIndex);
2682 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2683
2684 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2685 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2686
2687 RegisterInputSlots(graph, layerIndex, layer);
2688 RegisterOutputSlots(graph, layerIndex, layer);
2689}
2690
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01002691void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
2692{
2693 CHECK_LAYERS(graph, 0, layerIndex);
2694
2695 auto inputs = GetInputs(graph, layerIndex);
2696 auto outputs = GetOutputs(graph, layerIndex);
2697
2698 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
2699 auto fbDescriptor = fbLayer->descriptor();
2700
2701 armnn::StandInDescriptor descriptor;
2702 descriptor.m_NumInputs = fbDescriptor->numInputs();
2703 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
2704
2705 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
2706 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
2707
2708 const std::string layerName = GetLayerName(graph, layerIndex);
2709 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
2710
2711 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
2712 {
2713 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
2714 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
2715 }
2716
2717 RegisterInputSlots(graph, layerIndex, layer);
2718 RegisterOutputSlots(graph, layerIndex, layer);
2719}
2720
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002721} // namespace armnnDeserializer