blob: fe0539bac5a503454340b06764194e4808ae3f8a [file] [log] [blame]
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001/*
Michele Di Giorgioee82d342021-01-05 16:14:28 +00002 * Copyright (c) 2018-2021 Arm Limited.
Georgios Pinitasd8734b52017-12-22 15:27:52 +00003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Michalis Spyrouf4643372019-11-29 16:17:13 +000024#ifndef ARM_COMPUTE_GRAPH_LAYERS_H
25#define ARM_COMPUTE_GRAPH_LAYERS_H
Georgios Pinitasd8734b52017-12-22 15:27:52 +000026
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010027#include "arm_compute/graph/GraphBuilder.h"
28#include "arm_compute/graph/Types.h"
29#include "arm_compute/graph/frontend/ILayer.h"
30#include "arm_compute/graph/frontend/IStream.h"
31#include "arm_compute/graph/frontend/SubStream.h"
Georgios Pinitasd8734b52017-12-22 15:27:52 +000032
33#include "arm_compute/core/utils/misc/Utility.h"
34
35#include <memory>
36#include <string>
37
38namespace arm_compute
39{
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010040namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +000041{
42namespace frontend
43{
44/** Input Layer */
45class InputLayer final : public ILayer
46{
47public:
Alex Gildayc357c472018-03-21 13:54:09 +000048 /** Construct an input layer.
49 *
50 * @param[in] desc Description of input tensor.
51 * @param[in] accessor Accessor to get input tensor data from.
52 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +000053 InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
54 : _desc(desc), _accessor(std::move(accessor))
55 {
56 }
57
58 NodeID create_layer(IStream &s) override
59 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +010060 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +000061 return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
62 }
63
64private:
65 TensorDescriptor _desc;
66 ITensorAccessorUPtr _accessor;
67};
68
Michalis Spyrou1a569a32019-09-10 17:20:34 +010069/** Constant Layer */
70class ConstantLayer final : public ILayer
71{
72public:
73 /** Construct a constant layer.
74 *
75 * @param[in] desc Description of input tensor.
76 * @param[in] accessor Accessor to get input tensor data from.
77 */
78 ConstantLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
79 : _desc(desc), _accessor(std::move(accessor))
80 {
81 }
82
83 NodeID create_layer(IStream &s) override
84 {
85 NodeParams common_params = { name(), s.hints().target_hint };
86 return GraphBuilder::add_const_node(s.graph(), common_params, _desc, std::move(_accessor));
87 }
88
89private:
90 TensorDescriptor _desc;
91 ITensorAccessorUPtr _accessor;
92};
93
Georgios Pinitasd8734b52017-12-22 15:27:52 +000094/** Output Layer */
95class OutputLayer final : public ILayer
96{
97public:
Alex Gildayc357c472018-03-21 13:54:09 +000098 /** Construct an output layer.
99 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000100 * @param[in] accessor Accessor to give output tensor data to.
101 * @param[in] connection_idx (Optional) Input connection index
Alex Gildayc357c472018-03-21 13:54:09 +0000102 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000103 OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
104 : _accessor(std::move(accessor)), _connection_idx(connection_idx)
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000105 {
106 }
107
108 NodeID create_layer(IStream &s) override
109 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100110 NodeParams common_params = { name(), s.hints().target_hint };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000111 NodeIdxPair input = { s.tail_node(), _connection_idx };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000112 return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
113 }
114
115private:
116 ITensorAccessorUPtr _accessor;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000117 unsigned int _connection_idx;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000118};
119
120/** Activation Layer */
121class ActivationLayer final : public ILayer
122{
123public:
Alex Gildayc357c472018-03-21 13:54:09 +0000124 /** Construct an activation layer.
125 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000126 * @param[in] act_info Activation information
127 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000128 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000129 ActivationLayer(ActivationLayerInfo act_info,
130 const QuantizationInfo out_quant_info = QuantizationInfo())
131 : _act_info(act_info),
132 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000133 {
134 }
135
136 NodeID create_layer(IStream &s) override
137 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100138 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000139 NodeIdxPair input = { s.tail_node(), 0 };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000140 return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000141 }
142
143private:
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000144 ActivationLayerInfo _act_info;
145 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000146};
147
thecha01e8f05da2020-08-24 17:21:41 +0100148/** ArgMinMax Layer */
149class ArgMinMaxLayer final : public ILayer
150{
151public:
152 /** Construct an activation layer.
153 *
154 * @param[in] op Reduction Operation: min or max
155 * @param[in] axis Axis to perform reduction along
156 * @param[in] out_data_type (Optional) Output tensor data type
157 * @param[in] out_quant_info (Optional) Output quantization info
158 */
159 ArgMinMaxLayer(ReductionOperation op,
160 unsigned int axis,
161 DataType out_data_type = DataType::UNKNOWN,
162 const QuantizationInfo out_quant_info = QuantizationInfo())
163 : _op(op),
164 _axis(axis),
165 _out_data_type(out_data_type),
166 _out_quant_info(std::move(out_quant_info))
167 {
168 }
169
170 /** Create layer and add to the given stream.
171 *
172 * @param[in] s Stream to add layer to.
173 *
174 * @return ID of the created node.
175 */
176 NodeID create_layer(IStream &s) override
177 {
178 NodeParams common_params = { name(), s.hints().target_hint };
179 NodeIdxPair input = { s.tail_node(), 0 };
180 return GraphBuilder::add_arg_min_max_node(s.graph(), common_params, input, _op, _axis, _out_data_type, std::move(_out_quant_info));
181 }
182
183private:
184 ReductionOperation _op;
185 unsigned int _axis;
186 DataType _out_data_type;
187 QuantizationInfo _out_quant_info;
188};
189
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000190/** Batchnormalization Layer */
191class BatchNormalizationLayer final : public ILayer
192{
193public:
Alex Gildayc357c472018-03-21 13:54:09 +0000194 /** Construct a batch normalization layer.
195 *
196 * @param[in] mean Accessor to get mean tensor data from.
197 * @param[in] var Accessor to get var tensor data from.
198 * @param[in] gamma (Optional) Accessor to get gamma tensor data from. Default: nullptr.
199 * @param[in] beta (Optional) Accessor to get beta tensor data from. Default: nullptr.
200 * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
201 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000202 BatchNormalizationLayer(ITensorAccessorUPtr mean,
203 ITensorAccessorUPtr var,
204 ITensorAccessorUPtr gamma = nullptr,
205 ITensorAccessorUPtr beta = nullptr,
206 float epsilon = 0.001f)
207 : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
208 {
209 }
210
211 NodeID create_layer(IStream &s) override
212 {
213 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
214 ARM_COMPUTE_ERROR_ON(_var == nullptr);
215
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100216 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000217 NodeIdxPair input = { s.tail_node(), 0 };
218 return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
219 std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
220 }
221
222private:
223 ITensorAccessorUPtr _mean;
224 ITensorAccessorUPtr _var;
225 ITensorAccessorUPtr _gamma;
226 ITensorAccessorUPtr _beta;
227 float _epsilon;
228};
229
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100230/** Bounding Box Transform Layer */
231class BoundingBoxTransformLayer final : public ILayer
232{
233public:
234 /** Construct a bounding box transform layer.
235 *
236 * @param[in] sub_stream_input Graph sub-stream for the input
237 * @param[in] sub_stream_deltas Graph sub-stream for the deltas
238 * @param[in] info Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
239 */
240 BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
241 : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
242 {
243 }
244
245 /** Create layer and add to the given stream.
246 *
247 * @param[in] s Stream to add layer to.
248 *
249 * @return ID of the created node.
250 */
251 NodeID create_layer(IStream &s) override
252 {
253 NodeParams common_params = { name(), s.hints().target_hint };
254 NodeIdxPair input = { _ss_input.tail_node(), 0 };
255 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
256 return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
257 }
258
259private:
260 SubStream _ss_input;
261 SubStream _ss_deltas;
262 BoundingBoxTransformInfo _bbox_info;
263};
264
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100265/** Channel Shuffle Layer */
266class ChannelShuffleLayer final : public ILayer
267{
268public:
269 /** Construct a Channel Shuffle layer.
270 *
271 * @param[in] num_groups Number of groups
272 */
273 ChannelShuffleLayer(unsigned int num_groups)
274 : _num_groups(num_groups)
275 {
276 }
277
278 NodeID create_layer(IStream &s) override
279 {
280 NodeParams common_params = { name(), s.hints().target_hint };
281 NodeIdxPair input = { s.tail_node(), 0 };
282 return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
283 }
284
285private:
286 unsigned int _num_groups;
287};
288
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100289/** Concat Layer */
290class ConcatLayer final : public ILayer
291{
292public:
293 /** Construct a concatenation layer
294 *
295 * @param[in] sub_stream1 First graph branch
296 * @param[in] sub_stream2 Second graph branch
297 * @param[in] rest_sub_streams Rest sub-graph branches
298 */
299 template <typename... Ts>
300 ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000301 : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
Pablo Tello32521432018-11-15 14:43:10 +0000302 {
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000303 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
304 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
Pablo Tello32521432018-11-15 14:43:10 +0000305
306 utility::for_each([&](SubStream && sub_stream)
307 {
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000308 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
Pablo Tello32521432018-11-15 14:43:10 +0000309 },
310 std::move(rest_sub_streams)...);
311 }
312 /** Construct a concatenation layer
313 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000314 * @param[in] concat_descriptor Concat layer descriptor
315 * @param[in] sub_stream1 First graph branch
316 * @param[in] sub_stream2 Second graph branch
317 * @param[in] rest_sub_streams Rest sub-graph branches
Pablo Tello32521432018-11-15 14:43:10 +0000318 */
319 template <typename... Ts>
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000320 ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
321 : _sub_streams(), _concat_descriptor(concat_descriptor)
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100322 {
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000323 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
324 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100325
326 utility::for_each([&](SubStream && sub_stream)
327 {
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000328 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100329 },
330 std::move(rest_sub_streams)...);
331 }
332 /** Construct a concat layer
333 *
334 * @param[in] sub_stream Sub-stream
335 */
336 template <typename... Ts>
337 ConcatLayer(SubStream &&sub_stream)
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000338 : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100339 {
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000340 _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100341 }
342 NodeID create_layer(IStream &s) override
343 {
344 NodeID nid = EmptyNodeID;
345 NodeParams common_params = { name(), s.hints().target_hint };
346 if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
347 {
348 nid = _sub_streams[0]->tail_node();
349 }
350 else
351 {
352 // Collect tail nodes and concatenate
353 std::vector<NodeIdxPair> nodes;
354 for(auto &ss : _sub_streams)
355 {
356 if(ss && (ss->tail_node() != EmptyNodeID))
357 {
358 const auto tail_node = s.graph().node(ss->tail_node());
359 if(tail_node != nullptr && tail_node->type() != NodeType::Output)
360 {
361 nodes.push_back({ ss->tail_node(), 0 });
362 }
363 }
364 }
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000365 nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100366 }
367 return nid;
368 }
369
370private:
371 std::vector<std::unique_ptr<SubStream>> _sub_streams;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000372 descriptors::ConcatLayerDescriptor _concat_descriptor;
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100373};
374
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000375/** Convolution Layer */
376class ConvolutionLayer final : public ILayer
377{
378public:
Alex Gildayc357c472018-03-21 13:54:09 +0000379 /** Construct a convolution layer.
380 *
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100381 * @param[in] conv_width Convolution width.
382 * @param[in] conv_height Convolution height.
383 * @param[in] ofm Output feature map.
384 * @param[in] weights Accessor to get kernel weights from.
385 * @param[in] bias Accessor to get kernel bias from.
386 * @param[in] conv_info Padding and stride information.
387 * @param[in] num_groups (Optional) Number of groups. Default: 1.
388 * @param[in] weights_quant_info (Optional) Weights quantization information
389 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000390 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100391 ConvolutionLayer(unsigned int conv_width,
392 unsigned int conv_height,
393 unsigned int ofm,
394 ITensorAccessorUPtr weights,
395 ITensorAccessorUPtr bias,
396 PadStrideInfo conv_info,
397 unsigned int num_groups = 1,
398 const QuantizationInfo weights_quant_info = QuantizationInfo(),
399 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000400 : _conv_width(conv_width),
401 _conv_height(conv_height),
402 _ofm(ofm),
403 _conv_info(std::move(conv_info)),
404 _num_groups(num_groups),
405 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100406 _bias(std::move(bias)),
407 _weights_quant_info(std::move(weights_quant_info)),
408 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000409 {
410 }
411
412 NodeID create_layer(IStream &s) override
413 {
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000414 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100415 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000416 return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
Georgios Pinitasee33ea52018-03-08 16:01:29 +0000417 Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
Giorgio Arena59631a12018-05-02 13:59:04 +0100418 s.hints().convolution_method_hint, s.hints().fast_math_hint,
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100419 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000420 }
421
422private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100423 unsigned int _conv_width;
424 unsigned int _conv_height;
425 unsigned int _ofm;
426 const PadStrideInfo _conv_info;
427 unsigned int _num_groups;
428 ITensorAccessorUPtr _weights;
429 ITensorAccessorUPtr _bias;
430 const QuantizationInfo _weights_quant_info;
431 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000432};
433
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100434/** Deconvolution Layer */
435class DeconvolutionLayer final : public ILayer
436{
437public:
438 /** Construct a convolution layer.
439 *
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100440 * @param[in] conv_width Convolution width.
441 * @param[in] conv_height Convolution height.
442 * @param[in] ofm Output feature map.
443 * @param[in] weights Accessor to get kernel weights from.
444 * @param[in] bias Accessor to get kernel bias from.
445 * @param[in] deconv_info Padding and stride information.
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100446 */
447 DeconvolutionLayer(unsigned int conv_width,
448 unsigned int conv_height,
449 unsigned int ofm,
450 ITensorAccessorUPtr weights,
451 ITensorAccessorUPtr bias,
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100452 PadStrideInfo deconv_info)
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100453 : _conv_width(conv_width),
454 _conv_height(conv_height),
455 _ofm(ofm),
456 _deconv_info(std::move(deconv_info)),
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100457 _weights(std::move(weights)),
458 _bias(std::move(bias))
459 {
460 }
461
462 NodeID create_layer(IStream &s) override
463 {
464 NodeIdxPair input = { s.tail_node(), 0 };
465 NodeParams common_params = { name(), s.hints().target_hint };
466 return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100467 Size2D(_conv_width, _conv_height), _ofm, _deconv_info,
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100468 std::move(_weights), std::move(_bias));
469 }
470
471private:
472 unsigned int _conv_width;
473 unsigned int _conv_height;
474 unsigned int _ofm;
475 const PadStrideInfo _deconv_info;
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100476 ITensorAccessorUPtr _weights;
477 ITensorAccessorUPtr _bias;
478};
479
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000480/** Depthwise Convolution Layer */
481class DepthwiseConvolutionLayer final : public ILayer
482{
483public:
Alex Gildayc357c472018-03-21 13:54:09 +0000484 /** Construct a depthwise convolution layer.
485 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000486 * @param[in] conv_width Convolution width.
487 * @param[in] conv_height Convolution height.
488 * @param[in] weights Accessor to get kernel weights from.
489 * @param[in] bias Accessor to get kernel bias from.
490 * @param[in] conv_info Padding and stride information.
491 * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
492 * @param[in] weights_quant_info (Optional) Quantization info used for weights
493 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000494 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100495 DepthwiseConvolutionLayer(unsigned int conv_width,
496 unsigned int conv_height,
497 ITensorAccessorUPtr weights,
498 ITensorAccessorUPtr bias,
499 PadStrideInfo conv_info,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000500 int depth_multiplier = 1,
501 const QuantizationInfo weights_quant_info = QuantizationInfo(),
502 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000503 : _conv_width(conv_width),
504 _conv_height(conv_height),
505 _conv_info(std::move(conv_info)),
506 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100507 _bias(std::move(bias)),
Georgios Pinitas05045c12018-12-07 18:31:47 +0000508 _depth_multiplier(depth_multiplier),
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000509 _weights_quant_info(std::move(weights_quant_info)),
510 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000511 {
512 }
513
514 NodeID create_layer(IStream &s) override
515 {
516 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100517 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000518 return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
Georgios Pinitas05045c12018-12-07 18:31:47 +0000519 input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000520 s.hints().depthwise_convolution_method_hint,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000521 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000522 }
523
524private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100525 unsigned int _conv_width;
526 unsigned int _conv_height;
527 const PadStrideInfo _conv_info;
528 ITensorAccessorUPtr _weights;
529 ITensorAccessorUPtr _bias;
Georgios Pinitas05045c12018-12-07 18:31:47 +0000530 int _depth_multiplier;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000531 const QuantizationInfo _weights_quant_info;
532 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000533};
thecha010a05e6a2020-08-28 18:40:38 +0100534
535/** DepthToSpace Layer */
536class DepthToSpaceLayer final : public ILayer
537{
538public:
539 /** Construct an DepthToSpace layer.
540 *
541 * @param[in] block_shape Block size to rearranged
542 */
543 DepthToSpaceLayer(int32_t block_shape)
544 : _block_shape(block_shape)
545 {
546 }
547
548 NodeID create_layer(IStream &s) override
549 {
550 NodeParams common_params = { name(), s.hints().target_hint };
551 NodeIdxPair input = { s.tail_node(), 0 };
552 return GraphBuilder::add_depth_to_space_node(s.graph(), common_params, input, _block_shape);
553 }
554
555private:
556 int32_t _block_shape;
557};
558
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000559/** Dequantization Layer */
560class DequantizationLayer final : public ILayer
561{
562public:
563 /** Construct a dequantization layer.
564 *
565 */
566 DequantizationLayer()
567 {
568 }
569
570 NodeID create_layer(IStream &s) override
571 {
572 NodeParams common_params = { name(), s.hints().target_hint };
573 NodeIdxPair input = { s.tail_node(), 0 };
574 return GraphBuilder::add_dequantization_node(s.graph(), common_params, input);
575 }
576};
577
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000578/** DetectionOutput Layer */
579class DetectionOutputLayer final : public ILayer
580{
581public:
582 /** Construct a detection output layer.
583 *
584 * @param[in] sub_stream_conf Confidence graph sub-stream.
585 * @param[in] sub_stream_prior PriorBox graph sub-stream.
586 * @param[in] detect_info DetectionOutput parameters.
587 */
Georgios Pinitasf52cd782019-03-25 14:06:14 +0000588 DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000589 : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
590 {
591 }
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000592
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000593 NodeID create_layer(IStream &s) override
594 {
595 NodeParams common_params = { name(), s.hints().target_hint };
596 NodeIdxPair input_loc = { s.tail_node(), 0 };
597 NodeIdxPair input_conf = { _ss_conf.tail_node(), 0 };
598 NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
599 return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
600 }
601
602private:
603 SubStream _ss_conf;
604 SubStream _ss_prior;
605 DetectionOutputLayerInfo _detect_info;
606};
Isabella Gottardia7acb3c2019-01-08 13:48:44 +0000607/** DetectionOutputPostProcess Layer */
608class DetectionPostProcessLayer final : public ILayer
609{
610public:
611 /** Construct a detection output layer.
612 *
613 * @param[in] sub_stream_class_prediction Class prediction graph sub-stream.
614 * @param[in] detect_info DetectionOutput parameters.
615 * @param[in] anchors Accessor to get anchors tensor data from.
616 * @param[in] out_quant_info (Optional) Output quantization info
617 */
618 DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors,
619 const QuantizationInfo out_quant_info = QuantizationInfo())
620 : _sub_stream_class_prediction(std::move(sub_stream_class_prediction)), _detect_info(detect_info), _anchors(std::move(anchors)), _out_quant_info(std::move(out_quant_info))
621 {
622 }
623
624 NodeID create_layer(IStream &s) override
625 {
626 ARM_COMPUTE_ERROR_ON(_anchors == nullptr);
627
628 NodeParams common_params = { name(), s.hints().target_hint };
629 NodeIdxPair input_box_encoding = { s.tail_node(), 0 };
630 NodeIdxPair input_class_prediction = { _sub_stream_class_prediction.tail_node(), 0 };
631 return GraphBuilder::add_detection_post_process_node(s.graph(), common_params, input_box_encoding, input_class_prediction, _detect_info, std::move(_anchors), std::move(_out_quant_info));
632 }
633
634private:
635 SubStream _sub_stream_class_prediction;
636 DetectionPostProcessLayerInfo _detect_info;
637 ITensorAccessorUPtr _anchors;
638 const QuantizationInfo _out_quant_info;
639};
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100640/** Dummy Layer */
641class DummyLayer final : public ILayer
642{
643public:
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000644 /** Construct a dummy layer.
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100645 *
646 * @param[in] shape Output shape
647 */
648 DummyLayer(TensorShape shape)
649 : _shape(shape)
650 {
651 }
652
653 NodeID create_layer(IStream &s) override
654 {
655 NodeParams common_params = { name(), s.hints().target_hint };
656 NodeIdxPair input = { s.tail_node(), 0 };
657 return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
658 }
659
660private:
661 TensorShape _shape;
662};
663
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100664class EltwiseLayer final : public ILayer
665{
666public:
667 /** Construct an element-wise operation layer
668 *
669 * @param[in] sub_stream0 First graph sub-stream
670 * @param[in] sub_stream1 First graph sub-stream
671 * @param[in] op Element-wise operation to perform
672 */
673 EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
674 : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
675 {
676 }
677
678 NodeID create_layer(IStream &s) override
679 {
680 NodeParams common_params = { name(), s.hints().target_hint };
681 NodeIdxPair input0 = { _ss0.tail_node(), 0 };
682 NodeIdxPair input1 = { _ss1.tail_node(), 0 };
683
684 return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
685 }
686
687private:
688 SubStream _ss0;
689 SubStream _ss1;
690 EltwiseOperation _op;
691};
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000692/** Flatten Layer */
693class FlattenLayer final : public ILayer
694{
695public:
Alex Gildayc357c472018-03-21 13:54:09 +0000696 /** Construct a flatten layer. */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000697 FlattenLayer()
698 {
699 }
700
701 NodeID create_layer(IStream &s) override
702 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100703 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000704 NodeIdxPair input = { s.tail_node(), 0 };
705 return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
706 }
707};
708
709/** Fully Connected Layer */
710class FullyConnectedLayer final : public ILayer
711{
712public:
Alex Gildayc357c472018-03-21 13:54:09 +0000713 /** Construct a fully connected layer.
714 *
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100715 * @param[in] num_outputs Number of outputs.
716 * @param[in] weights Accessor to get weights from.
717 * @param[in] bias Accessor to get bias from.
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100718 * @param[in] fc_info (Optional) Fully connected layer metadata
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100719 * @param[in] weights_quant_info (Optional) Weights quantization information
720 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000721 */
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100722 FullyConnectedLayer(unsigned int num_outputs,
723 ITensorAccessorUPtr weights,
724 ITensorAccessorUPtr bias,
725 const FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo(),
726 const QuantizationInfo weights_quant_info = QuantizationInfo(),
727 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100728 : _num_outputs(num_outputs),
729 _weights(std::move(weights)),
730 _bias(std::move(bias)),
Michele Di Giorgioa42f55f2019-03-08 14:52:17 +0000731 _weights_ss(nullptr),
732 _bias_ss(nullptr),
733 _fc_info(fc_info),
734 _weights_quant_info(std::move(weights_quant_info)),
735 _out_quant_info(std::move(out_quant_info))
736 {
737 }
738
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] sub_stream_weights Graph sub-stream for the weights.
     * @param[in] sub_stream_bias    Graph sub-stream for the bias.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     *
     * @note NOTE(review): in this sub-stream variant @p weights_quant_info is stored but
     *       not forwarded by create_layer(), which passes only the output quantization
     *       info to the graph builder — confirm this is intentional.
     */
    FullyConnectedLayer(unsigned int num_outputs,
                        SubStream sub_stream_weights,
                        SubStream sub_stream_bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo weights_quant_info        = QuantizationInfo(),
                        const QuantizationInfo out_quant_info            = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(nullptr), // Accessor members unused in this sub-stream variant
          _bias(nullptr),
          _weights_ss(std::make_unique<SubStream>(std::move(sub_stream_weights))),
          _bias_ss(std::make_unique<SubStream>(std::move(sub_stream_bias))),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }
764
    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        if(_weights != nullptr)
        {
            // Accessor-based variant: weights/bias tensors are populated from the accessors.
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           std::move(_weights), std::move(_bias), _fc_info,
                                                           std::move(_weights_quant_info), std::move(_out_quant_info), s.hints().fast_math_hint);
        }
        else
        {
            // Sub-stream variant: weights (and optionally bias) come from previously built graph branches.
            ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);

            // Bias sub-stream is optional; EmptyNodeID signals "no bias" to the builder.
            NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           _weights_ss->tail_node(), bias_nid, _fc_info,
                                                           std::move(_out_quant_info), s.hints().fast_math_hint);
        }
    }
791
private:
    unsigned int                  _num_outputs;        // Number of output neurons
    ITensorAccessorUPtr           _weights;            // Weights accessor (accessor variant; nullptr otherwise)
    ITensorAccessorUPtr           _bias;               // Bias accessor (may be nullptr)
    std::unique_ptr<SubStream>    _weights_ss;         // Weights sub-stream (sub-stream variant; nullptr otherwise)
    std::unique_ptr<SubStream>    _bias_ss;            // Bias sub-stream (may be nullptr)
    const FullyConnectedLayerInfo _fc_info;            // Fully connected layer metadata
    const QuantizationInfo        _weights_quant_info; // Weights quantization info (accessor variant only)
    const QuantizationInfo        _out_quant_info;     // Output quantization info
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000801};
802
Manuel Bottini5209be52019-02-13 16:34:56 +0000803/** Generate Proposals Layer */
804class GenerateProposalsLayer final : public ILayer
805{
806public:
807 /** Construct a generate proposals layer.
808 *
809 * @param[in] ss_scores Graph sub-stream for the scores.
810 * @param[in] ss_deltas Graph sub-stream for the deltas.
811 * @param[in] ss_anchors Graph sub-stream for the anchors.
812 * @param[in] info Generate Proposals operation information.
813 */
814 GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
815 : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
816 {
817 }
818
819 /** Create layer and add to the given stream.
820 *
821 * @param[in] s Stream to add layer to.
822 *
823 * @return ID of the created node.
824 */
825 NodeID create_layer(IStream &s) override
826 {
827 NodeParams common_params = { name(), s.hints().target_hint };
828 NodeIdxPair scores = { _ss_scores.tail_node(), 0 };
829 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
830 NodeIdxPair anchors = { _ss_anchors.tail_node(), 0 };
831 return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
832 }
833
834private:
835 SubStream _ss_scores;
836 SubStream _ss_deltas;
837 SubStream _ss_anchors;
838 GenerateProposalsInfo _info;
839};
840
thecha013603aff2020-09-01 14:52:38 +0100841/** L2 Normalize Layer */
842class L2NormalizeLayer final : public ILayer
843{
844public:
845 /** Construct a L2 Normalize layer.
846 *
847 * @param[in] axis Axis to perform normalization on
848 * @param[in] epsilon Lower bound value for the normalization
849 */
850 L2NormalizeLayer(int axis, float epsilon)
851 : _axis(axis), _epsilon(epsilon)
852 {
853 }
854
855 NodeID create_layer(IStream &s) override
856 {
857 NodeParams common_params = { name(), s.hints().target_hint };
858 NodeIdxPair input = { s.tail_node(), 0 };
859 return GraphBuilder::add_l2_normalize_node(s.graph(), common_params, input, _axis, _epsilon);
860 }
861
862private:
863 int _axis;
864 float _epsilon;
865};
866
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000867/** Normalization Layer */
868class NormalizationLayer final : public ILayer
869{
870public:
Alex Gildayc357c472018-03-21 13:54:09 +0000871 /** Construct a normalization layer.
872 *
873 * @param[in] norm_info Normalization information.
874 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000875 NormalizationLayer(NormalizationLayerInfo norm_info)
876 : _norm_info(norm_info)
877 {
878 }
879
880 NodeID create_layer(IStream &s) override
881 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100882 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000883 NodeIdxPair input = { s.tail_node(), 0 };
884 return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
885 }
886
887private:
888 NormalizationLayerInfo _norm_info;
889};
890
Michele Di Giorgio555d1102018-09-12 13:51:59 +0100891/** Normalize planar YUV Layer */
892class NormalizePlanarYUVLayer final : public ILayer
893{
894public:
895 /** Construct a normalize planar YUV layer.
896 *
897 * @param[in] mean Accessor to get mean tensor data from.
898 * @param[in] std Accessor to get std tensor data from.
899 */
900 NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
901 ITensorAccessorUPtr std)
902 : _mean(std::move(mean)), _std(std::move(std))
903 {
904 }
905
906 NodeID create_layer(IStream &s) override
907 {
908 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
909 ARM_COMPUTE_ERROR_ON(_std == nullptr);
910
911 NodeParams common_params = { name(), s.hints().target_hint };
912 NodeIdxPair input = { s.tail_node(), 0 };
913 return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
914 std::move(_mean), std::move(_std));
915 }
916
917private:
918 ITensorAccessorUPtr _mean;
919 ITensorAccessorUPtr _std;
920};
921
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100922/** Pad Layer */
923class PadLayer final : public ILayer
924{
925public:
926 /** Construct a pad layer.
927 *
Georgios Pinitas102b0ce2020-02-13 17:59:09 +0000928 * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
929 * specifies the front and the end padding in the i-th dimension.
930 * @param[in] pad_value Padding value to use. Defaults to 0.
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100931 */
Georgios Pinitas102b0ce2020-02-13 17:59:09 +0000932 PadLayer(PaddingList padding, PixelValue pad_value = PixelValue())
933 : _padding(padding), _pad_value(pad_value)
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100934 {
935 }
936
937 NodeID create_layer(IStream &s) override
938 {
939 NodeParams common_params = { name(), s.hints().target_hint };
940 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas102b0ce2020-02-13 17:59:09 +0000941 return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding, _pad_value);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100942 }
943
944private:
945 PaddingList _padding;
Georgios Pinitas102b0ce2020-02-13 17:59:09 +0000946 PixelValue _pad_value;
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100947};
948
Georgios Pinitas57c48242018-08-02 13:41:49 +0100949/** Permute Layer */
950class PermuteLayer final : public ILayer
951{
952public:
953 /** Construct a permute layer.
954 *
955 * @param[in] perm Permutation vector.
956 * @param[in] layout (Optional) Data layout to assign to permuted tensor.
957 * If UNKNOWN then the input's layout will be used.
958 */
959 PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
960 : _perm(perm), _layout(layout)
961 {
962 }
963
964 NodeID create_layer(IStream &s) override
965 {
966 NodeParams common_params = { name(), s.hints().target_hint };
967 NodeIdxPair input = { s.tail_node(), 0 };
968 return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
969 }
970
971private:
972 PermutationVector _perm;
973 DataLayout _layout;
974};
975
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000976/** Pooling Layer */
977class PoolingLayer final : public ILayer
978{
979public:
Alex Gildayc357c472018-03-21 13:54:09 +0000980 /** Construct a pooling layer.
981 *
982 * @param[in] pool_info Pooling information.
983 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000984 PoolingLayer(PoolingLayerInfo pool_info)
985 : _pool_info(pool_info)
986 {
987 }
988
989 NodeID create_layer(IStream &s) override
990 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100991 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000992 NodeIdxPair input = { s.tail_node(), 0 };
993 return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
994 }
995
996private:
997 PoolingLayerInfo _pool_info;
998};
999
/** PRelu Layer */
class PReluLayer final : public ILayer
{
public:
    /** Construct an PRelu operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream (provides the alpha values)
     */
    PReluLayer(SubStream &&sub_stream0, SubStream &&sub_stream1)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss0.tail_node(), 0 };
        NodeIdxPair alpha         = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_prelu_node(s.graph(), common_params, input, alpha);
    }

private:
    SubStream _ss0; // Sub-stream providing the input
    SubStream _ss1; // Sub-stream providing the alpha values
};
1027
Giorgio Arena6e9d0e02020-01-03 15:02:04 +00001028/** Print Layer */
1029class PrintLayer final : public ILayer
1030{
1031public:
1032 /** Construct a print layer.
1033 *
1034 * Example usage to locally dequantize and print a tensor:
1035 *
1036 * Tensor *output = new Tensor();
1037 * const auto transform = [output](ITensor *input)
1038 * {
1039 * output->allocator()->init(*input->info());
1040 * output->info()->set_data_type(DataType::F32);
1041 * output->allocator()->allocate();
1042 *
1043 * Window win;
1044 * win.use_tensor_dimensions(input->info()->tensor_shape());
1045 * Iterator in(input, win);
1046 * Iterator out(output, win);
1047 * execute_window_loop(win, [&](const Coordinates &)
1048 * {
1049 * *(reinterpret_cast<float *>(out.ptr())) = dequantize_qasymm8(*in.ptr(), input->info()->quantization_info().uniform());
1050 * }, in, out);
1051 *
1052 * return output;
1053 * };
1054 *
1055 * graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info), get_input_accessor(common_params, nullptr, false))
1056 * << ...
1057 * << \\ CNN Layers
1058 * << ...
1059 * << PrintLayer(std::cout, IOFormatInfo(), transform)
1060 * << ...
1061 * << OutputLayer(get_output_accessor(common_params, 5));
1062 *
1063 * @param[in] stream Output stream.
1064 * @param[in] format_info (Optional) Format info.
1065 * @param[in] transform (Optional) Input transform function.
1066 */
1067 PrintLayer(std::ostream &stream, const IOFormatInfo &format_info = IOFormatInfo(), const std::function<ITensor *(ITensor *)> transform = nullptr)
1068 : _stream(stream), _format_info(format_info), _transform(transform)
1069 {
1070 }
1071
1072 NodeID create_layer(IStream &s) override
1073 {
1074 NodeParams common_params = { name(), s.hints().target_hint };
1075 NodeIdxPair input = { s.tail_node(), 0 };
1076 return GraphBuilder::add_print_node(s.graph(), common_params, input, _stream, _format_info, _transform);
1077 }
1078
1079private:
1080 std::ostream &_stream;
1081 const IOFormatInfo &_format_info;
1082 const std::function<ITensor *(ITensor *)> _transform;
1083};
1084
Pablo Tello32521432018-11-15 14:43:10 +00001085/** PriorBox Layer */
1086class PriorBoxLayer final : public ILayer
1087{
1088public:
1089 /** Construct a priorbox layer.
1090 *
1091 * @param[in] sub_stream First graph sub-stream
1092 * @param[in] prior_info PriorBox parameters.
1093 */
Georgios Pinitasf52cd782019-03-25 14:06:14 +00001094 PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
Pablo Tello32521432018-11-15 14:43:10 +00001095 : _ss(std::move(sub_stream)), _prior_info(prior_info)
1096 {
1097 }
1098
1099 NodeID create_layer(IStream &s) override
1100 {
1101 NodeParams common_params = { name(), s.hints().target_hint };
1102 NodeIdxPair input0 = { s.tail_node(), 0 };
1103 NodeIdxPair input1 = { _ss.tail_node(), 0 };
1104 return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
1105 }
1106
1107private:
1108 SubStream _ss;
1109 PriorBoxLayerInfo _prior_info;
1110};
1111
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001112/** Quantization Layer */
1113class QuantizationLayer final : public ILayer
1114{
1115public:
1116 /** Construct a quantization layer.
1117 *
1118 * @param[in] out_quant_info Output tensor quantization info
1119 */
1120 QuantizationLayer(QuantizationInfo out_quant_info)
1121 : _out_quant_info(out_quant_info)
1122 {
1123 }
1124
1125 NodeID create_layer(IStream &s) override
1126 {
1127 NodeParams common_params = { name(), s.hints().target_hint };
1128 NodeIdxPair input = { s.tail_node(), 0 };
1129 return GraphBuilder::add_quantization_node(s.graph(), common_params, input, _out_quant_info);
1130 }
1131
1132private:
1133 QuantizationInfo _out_quant_info;
1134};
1135
thecha01d64444b2020-09-07 14:50:21 +01001136/** Reduction Layer */
1137class ReductionLayer final : public ILayer
1138{
1139public:
1140 /** Construct a reduction layer.
1141 *
1142 * @param[in] op Reduction operation
1143 * @param[in] axis Reduction axis
1144 * @param[in] keep_dims (Optional) Whether to keep the reduced dimension after the operation. Defaults to true.
1145 */
1146 ReductionLayer(ReductionOperation op, unsigned int axis, bool keep_dims)
1147 : _op(op), _axis(axis), _keep_dims(keep_dims)
1148 {
1149 }
1150
1151 NodeID create_layer(IStream &s) override
1152 {
1153 NodeParams common_params = { name(), s.hints().target_hint };
1154 NodeIdxPair input = { s.tail_node(), 0 };
1155 return GraphBuilder::add_reduction_operation_node(s.graph(), common_params, input, _op, _axis, _keep_dims);
1156 }
1157
1158private:
1159 ReductionOperation _op;
1160 unsigned int _axis;
1161 bool _keep_dims;
1162};
1163
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001164/** Reorg Layer */
1165class ReorgLayer final : public ILayer
1166{
1167public:
1168 /** Construct a reorg layer.
1169 *
1170 * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
1171 * It defines the spatial distance between 2 consecutive pixels in the x and y direction
1172 */
1173 ReorgLayer(int stride)
1174 : _stride(stride)
1175 {
1176 }
1177
1178 NodeID create_layer(IStream &s) override
1179 {
1180 NodeParams common_params = { name(), s.hints().target_hint };
1181 NodeIdxPair input = { s.tail_node(), 0 };
1182 return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
1183 }
1184
1185private:
1186 int _stride;
1187};
1188
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001189/** Reshape Layer */
1190class ReshapeLayer final : public ILayer
1191{
1192public:
Alex Gildayc357c472018-03-21 13:54:09 +00001193 /** Construct a reshape layer.
1194 *
1195 * @param[in] shape Target shape.
1196 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001197 ReshapeLayer(TensorShape shape)
1198 : _shape(shape)
1199 {
1200 }
1201
1202 NodeID create_layer(IStream &s) override
1203 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +01001204 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001205 NodeIdxPair input = { s.tail_node(), 0 };
1206 return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
1207 }
1208
1209private:
1210 TensorShape _shape;
1211};
1212
Georgios Pinitas087eaf62018-05-16 15:52:35 +01001213/** Resize Layer */
1214class ResizeLayer final : public ILayer
1215{
1216public:
1217 ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
1218 : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
1219 {
1220 }
1221
1222 NodeID create_layer(IStream &s) override
1223 {
1224 NodeParams common_params = { name(), s.hints().target_hint };
1225 NodeIdxPair input = { s.tail_node(), 0 };
1226 return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
1227 }
1228
1229private:
1230 InterpolationPolicy _policy;
1231 float _width_scale;
1232 float _height_scale;
1233};
1234
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001235/** ROIAlign Layer */
1236class ROIAlignLayer final : public ILayer
1237{
1238public:
1239 /** Construct a RoiAlign layer.
1240 *
1241 * @param[in] sub_stream_input Graph sub-stream for the input
1242 * @param[in] sub_stream_rois Graph sub-stream for the rois
1243 * @param[in] pool_info Pooling information.
1244 */
1245 ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
1246 : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
1247 {
1248 }
1249
1250 /** Prevent instances of this class from being copy constructed */
1251 ROIAlignLayer(const ROIAlignLayer &) = delete;
1252 /** Prevent instances of this class from being copied */
1253 ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;
1254
1255 NodeID create_layer(IStream &s) override
1256 {
1257 NodeParams common_params = { name(), s.hints().target_hint };
1258 NodeIdxPair input = { _ss_input.tail_node(), 0 };
1259 NodeIdxPair rois = { _ss_rois.tail_node(), 0 };
1260 return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
1261 }
1262
1263private:
1264 SubStream _ss_input;
1265 SubStream _ss_rois;
1266 ROIPoolingLayerInfo _pool_info;
1267};
1268
Isabella Gottardi88d5b222018-04-06 12:24:55 +01001269/** Scale Layer */
1270class ScaleLayer final : public ILayer
1271{
1272public:
1273 /** Construct a scale layer.
1274 *
1275 * @param[in] mul_w Accessor to get mul weight from.
1276 * @param[in] add_w Accessor to get add weight from.
1277 */
1278 ScaleLayer(ITensorAccessorUPtr mul_w,
1279 ITensorAccessorUPtr add_w)
1280 : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
1281 {
1282 }
1283
1284 NodeID create_layer(IStream &s) override
1285 {
1286 NodeParams common_params = { name(), s.hints().target_hint };
1287 NodeIdxPair input = { s.tail_node(), 0 };
1288 return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
1289 }
1290
1291private:
1292 ITensorAccessorUPtr _mul_w;
1293 ITensorAccessorUPtr _add_w;
1294};
1295
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001296/** Slice Layer */
1297class SliceLayer final : public ILayer
1298{
1299public:
1300 /** Construct a slice layer.
1301 *
1302 * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1303 * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1304 */
1305 SliceLayer(Coordinates &starts, Coordinates &ends)
1306 : _starts(starts), _ends(ends)
1307 {
1308 }
1309
1310 NodeID create_layer(IStream &s) override
1311 {
1312 NodeParams common_params = { name(), s.hints().target_hint };
1313 NodeIdxPair input = { s.tail_node(), 0 };
1314 return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
1315 }
1316
1317private:
1318 Coordinates _starts;
1319 Coordinates _ends;
1320};
1321
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001322/** Softmax Layer */
1323class SoftmaxLayer final : public ILayer
1324{
1325public:
Alex Gildayc357c472018-03-21 13:54:09 +00001326 /** Construct a softmax layer.
1327 *
1328 * @param[in] beta (Optional) Beta value. Default 1.0.
1329 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001330 SoftmaxLayer(float beta = 1.0f)
1331 : _beta(beta)
1332 {
1333 }
1334
1335 NodeID create_layer(IStream &s) override
1336 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +01001337 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001338 NodeIdxPair input = { s.tail_node(), 0 };
1339 return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
1340 }
1341
1342private:
1343 float _beta;
1344};
Michalis Spyrou96f67692018-09-13 11:39:28 +01001345
/** Stack Layer */
class StackLayer final : public ILayer
{
public:
    /** Construct a stack layer (stacking axis defaults to 0).
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer
     *
     * @param[in] axis             Stack layer axis along which to stack the inputs
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer from a single sub-stream
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
    }
    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        // Single branch: nothing to stack, just forward its tail node.
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and stack
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    // Output nodes cannot feed another node, so they are skipped.
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams; // Input branches to stack
    int                                     _axis;        // Axis along which to stack
};
1431
thecha012bfadd92020-08-12 17:25:51 +01001432/** StridedSlice Layer */
1433class StridedSliceLayer final : public ILayer
1434{
1435public:
1436 /** Construct a strided slice layer.
1437 *
1438 * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1439 * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1440 * @param[in] strides The strides of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1441 * @param[in] strided_slice_info Contains masks for the starts, ends and strides
1442 */
1443 StridedSliceLayer(Coordinates &starts, Coordinates &ends, BiStrides &strides, StridedSliceLayerInfo strided_slice_info)
1444 : _starts(starts), _ends(ends), _strides(strides), _info(strided_slice_info)
1445 {
1446 }
1447
1448 NodeID create_layer(IStream &s) override
1449 {
1450 NodeParams common_params = { name(), s.hints().target_hint };
1451 NodeIdxPair input = { s.tail_node(), 0 };
1452 return GraphBuilder::add_strided_slice_node(s.graph(), common_params, input, _starts, _ends, _strides, _info);
1453 }
1454
1455private:
1456 Coordinates _starts;
1457 Coordinates _ends;
1458 BiStrides _strides;
1459 StridedSliceLayerInfo _info;
1460};
1461
Michalis Spyrou96f67692018-09-13 11:39:28 +01001462/** YOLO Layer */
1463class YOLOLayer final : public ILayer
1464{
1465public:
1466 /** Construct a YOLO layer.
1467 *
Michele Di Giorgioee82d342021-01-05 16:14:28 +00001468 * @param[in] act_info Activation info
Michalis Spyrou96f67692018-09-13 11:39:28 +01001469 */
Georgios Pinitas0b1c2db2020-12-04 15:51:34 +00001470 YOLOLayer(ActivationLayerInfo act_info)
1471 : _act_info(act_info)
Michalis Spyrou96f67692018-09-13 11:39:28 +01001472 {
1473 }
1474
1475 NodeID create_layer(IStream &s) override
1476 {
1477 NodeParams common_params = { name(), s.hints().target_hint };
1478 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas0b1c2db2020-12-04 15:51:34 +00001479 return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info);
Michalis Spyrou96f67692018-09-13 11:39:28 +01001480 }
1481
1482private:
1483 ActivationLayerInfo _act_info;
Michalis Spyrou96f67692018-09-13 11:39:28 +01001484};
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001485} // namespace frontend
Georgios Pinitasd9eb2752018-04-03 13:44:29 +01001486} // namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001487} // namespace arm_compute
Michalis Spyrouf4643372019-11-29 16:17:13 +00001488#endif /* ARM_COMPUTE_GRAPH_LAYERS_H */