blob: 2b44d0e8449c6c43ebd9291a35301bcba497267b [file] [log] [blame]
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001/*
Giorgio Arena6e9d0e02020-01-03 15:02:04 +00002 * Copyright (c) 2018-2020 ARM Limited.
Georgios Pinitasd8734b52017-12-22 15:27:52 +00003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Michalis Spyrouf4643372019-11-29 16:17:13 +000024#ifndef ARM_COMPUTE_GRAPH_LAYERS_H
25#define ARM_COMPUTE_GRAPH_LAYERS_H
Georgios Pinitasd8734b52017-12-22 15:27:52 +000026
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010027#include "arm_compute/graph/GraphBuilder.h"
28#include "arm_compute/graph/Types.h"
29#include "arm_compute/graph/frontend/ILayer.h"
30#include "arm_compute/graph/frontend/IStream.h"
31#include "arm_compute/graph/frontend/SubStream.h"
Georgios Pinitasd8734b52017-12-22 15:27:52 +000032
33#include "arm_compute/core/utils/misc/Utility.h"
34
35#include <memory>
36#include <string>
37
38namespace arm_compute
39{
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010040namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +000041{
42namespace frontend
43{
44/** Input Layer */
45class InputLayer final : public ILayer
46{
47public:
Alex Gildayc357c472018-03-21 13:54:09 +000048 /** Construct an input layer.
49 *
50 * @param[in] desc Description of input tensor.
51 * @param[in] accessor Accessor to get input tensor data from.
52 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +000053 InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
54 : _desc(desc), _accessor(std::move(accessor))
55 {
56 }
57
58 NodeID create_layer(IStream &s) override
59 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +010060 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +000061 return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
62 }
63
64private:
65 TensorDescriptor _desc;
66 ITensorAccessorUPtr _accessor;
67};
68
Michalis Spyrou1a569a32019-09-10 17:20:34 +010069/** Constant Layer */
70class ConstantLayer final : public ILayer
71{
72public:
73 /** Construct a constant layer.
74 *
75 * @param[in] desc Description of input tensor.
76 * @param[in] accessor Accessor to get input tensor data from.
77 */
78 ConstantLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
79 : _desc(desc), _accessor(std::move(accessor))
80 {
81 }
82
83 NodeID create_layer(IStream &s) override
84 {
85 NodeParams common_params = { name(), s.hints().target_hint };
86 return GraphBuilder::add_const_node(s.graph(), common_params, _desc, std::move(_accessor));
87 }
88
89private:
90 TensorDescriptor _desc;
91 ITensorAccessorUPtr _accessor;
92};
93
Georgios Pinitasd8734b52017-12-22 15:27:52 +000094/** Output Layer */
95class OutputLayer final : public ILayer
96{
97public:
Alex Gildayc357c472018-03-21 13:54:09 +000098 /** Construct an output layer.
99 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000100 * @param[in] accessor Accessor to give output tensor data to.
101 * @param[in] connection_idx (Optional) Input connection index
Alex Gildayc357c472018-03-21 13:54:09 +0000102 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000103 OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
104 : _accessor(std::move(accessor)), _connection_idx(connection_idx)
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000105 {
106 }
107
108 NodeID create_layer(IStream &s) override
109 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100110 NodeParams common_params = { name(), s.hints().target_hint };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000111 NodeIdxPair input = { s.tail_node(), _connection_idx };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000112 return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
113 }
114
115private:
116 ITensorAccessorUPtr _accessor;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000117 unsigned int _connection_idx;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000118};
119
120/** Activation Layer */
121class ActivationLayer final : public ILayer
122{
123public:
Alex Gildayc357c472018-03-21 13:54:09 +0000124 /** Construct an activation layer.
125 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000126 * @param[in] act_info Activation information
127 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000128 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000129 ActivationLayer(ActivationLayerInfo act_info,
130 const QuantizationInfo out_quant_info = QuantizationInfo())
131 : _act_info(act_info),
132 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000133 {
134 }
135
136 NodeID create_layer(IStream &s) override
137 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100138 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000139 NodeIdxPair input = { s.tail_node(), 0 };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000140 return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000141 }
142
143private:
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000144 ActivationLayerInfo _act_info;
145 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000146};
147
148/** Batchnormalization Layer */
149class BatchNormalizationLayer final : public ILayer
150{
151public:
Alex Gildayc357c472018-03-21 13:54:09 +0000152 /** Construct a batch normalization layer.
153 *
154 * @param[in] mean Accessor to get mean tensor data from.
155 * @param[in] var Accessor to get var tensor data from.
156 * @param[in] gamma (Optional) Accessor to get gamma tensor data from. Default: nullptr.
157 * @param[in] beta (Optional) Accessor to get beta tensor data from. Default: nullptr.
158 * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
159 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000160 BatchNormalizationLayer(ITensorAccessorUPtr mean,
161 ITensorAccessorUPtr var,
162 ITensorAccessorUPtr gamma = nullptr,
163 ITensorAccessorUPtr beta = nullptr,
164 float epsilon = 0.001f)
165 : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
166 {
167 }
168
169 NodeID create_layer(IStream &s) override
170 {
171 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
172 ARM_COMPUTE_ERROR_ON(_var == nullptr);
173
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100174 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000175 NodeIdxPair input = { s.tail_node(), 0 };
176 return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
177 std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
178 }
179
180private:
181 ITensorAccessorUPtr _mean;
182 ITensorAccessorUPtr _var;
183 ITensorAccessorUPtr _gamma;
184 ITensorAccessorUPtr _beta;
185 float _epsilon;
186};
187
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100188/** Bounding Box Transform Layer */
189class BoundingBoxTransformLayer final : public ILayer
190{
191public:
192 /** Construct a bounding box transform layer.
193 *
194 * @param[in] sub_stream_input Graph sub-stream for the input
195 * @param[in] sub_stream_deltas Graph sub-stream for the deltas
196 * @param[in] info Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
197 */
198 BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
199 : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
200 {
201 }
202
203 /** Create layer and add to the given stream.
204 *
205 * @param[in] s Stream to add layer to.
206 *
207 * @return ID of the created node.
208 */
209 NodeID create_layer(IStream &s) override
210 {
211 NodeParams common_params = { name(), s.hints().target_hint };
212 NodeIdxPair input = { _ss_input.tail_node(), 0 };
213 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
214 return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
215 }
216
217private:
218 SubStream _ss_input;
219 SubStream _ss_deltas;
220 BoundingBoxTransformInfo _bbox_info;
221};
222
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100223/** Channel Shuffle Layer */
224class ChannelShuffleLayer final : public ILayer
225{
226public:
227 /** Construct a Channel Shuffle layer.
228 *
229 * @param[in] num_groups Number of groups
230 */
231 ChannelShuffleLayer(unsigned int num_groups)
232 : _num_groups(num_groups)
233 {
234 }
235
236 NodeID create_layer(IStream &s) override
237 {
238 NodeParams common_params = { name(), s.hints().target_hint };
239 NodeIdxPair input = { s.tail_node(), 0 };
240 return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
241 }
242
243private:
244 unsigned int _num_groups;
245};
246
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100247/** Concat Layer */
248class ConcatLayer final : public ILayer
249{
250public:
251 /** Construct a concatenation layer
252 *
253 * @param[in] sub_stream1 First graph branch
254 * @param[in] sub_stream2 Second graph branch
255 * @param[in] rest_sub_streams Rest sub-graph branches
256 */
257 template <typename... Ts>
258 ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000259 : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
Pablo Tello32521432018-11-15 14:43:10 +0000260 {
261 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
262 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));
263
264 utility::for_each([&](SubStream && sub_stream)
265 {
266 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
267 },
268 std::move(rest_sub_streams)...);
269 }
270 /** Construct a concatenation layer
271 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000272 * @param[in] concat_descriptor Concat layer descriptor
273 * @param[in] sub_stream1 First graph branch
274 * @param[in] sub_stream2 Second graph branch
275 * @param[in] rest_sub_streams Rest sub-graph branches
Pablo Tello32521432018-11-15 14:43:10 +0000276 */
277 template <typename... Ts>
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000278 ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
279 : _sub_streams(), _concat_descriptor(concat_descriptor)
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100280 {
281 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
282 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));
283
284 utility::for_each([&](SubStream && sub_stream)
285 {
286 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
287 },
288 std::move(rest_sub_streams)...);
289 }
290 /** Construct a concat layer
291 *
292 * @param[in] sub_stream Sub-stream
293 */
294 template <typename... Ts>
295 ConcatLayer(SubStream &&sub_stream)
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000296 : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100297 {
298 _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
299 }
300 NodeID create_layer(IStream &s) override
301 {
302 NodeID nid = EmptyNodeID;
303 NodeParams common_params = { name(), s.hints().target_hint };
304 if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
305 {
306 nid = _sub_streams[0]->tail_node();
307 }
308 else
309 {
310 // Collect tail nodes and concatenate
311 std::vector<NodeIdxPair> nodes;
312 for(auto &ss : _sub_streams)
313 {
314 if(ss && (ss->tail_node() != EmptyNodeID))
315 {
316 const auto tail_node = s.graph().node(ss->tail_node());
317 if(tail_node != nullptr && tail_node->type() != NodeType::Output)
318 {
319 nodes.push_back({ ss->tail_node(), 0 });
320 }
321 }
322 }
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000323 nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100324 }
325 return nid;
326 }
327
328private:
329 std::vector<std::unique_ptr<SubStream>> _sub_streams;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000330 descriptors::ConcatLayerDescriptor _concat_descriptor;
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100331};
332
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000333/** Convolution Layer */
334class ConvolutionLayer final : public ILayer
335{
336public:
Alex Gildayc357c472018-03-21 13:54:09 +0000337 /** Construct a convolution layer.
338 *
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100339 * @param[in] conv_width Convolution width.
340 * @param[in] conv_height Convolution height.
341 * @param[in] ofm Output feature map.
342 * @param[in] weights Accessor to get kernel weights from.
343 * @param[in] bias Accessor to get kernel bias from.
344 * @param[in] conv_info Padding and stride information.
345 * @param[in] num_groups (Optional) Number of groups. Default: 1.
346 * @param[in] weights_quant_info (Optional) Weights quantization information
347 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000348 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100349 ConvolutionLayer(unsigned int conv_width,
350 unsigned int conv_height,
351 unsigned int ofm,
352 ITensorAccessorUPtr weights,
353 ITensorAccessorUPtr bias,
354 PadStrideInfo conv_info,
355 unsigned int num_groups = 1,
356 const QuantizationInfo weights_quant_info = QuantizationInfo(),
357 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000358 : _conv_width(conv_width),
359 _conv_height(conv_height),
360 _ofm(ofm),
361 _conv_info(std::move(conv_info)),
362 _num_groups(num_groups),
363 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100364 _bias(std::move(bias)),
365 _weights_quant_info(std::move(weights_quant_info)),
366 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000367 {
368 }
369
370 NodeID create_layer(IStream &s) override
371 {
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000372 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100373 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000374 return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
Georgios Pinitasee33ea52018-03-08 16:01:29 +0000375 Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
Giorgio Arena59631a12018-05-02 13:59:04 +0100376 s.hints().convolution_method_hint, s.hints().fast_math_hint,
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100377 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000378 }
379
380private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100381 unsigned int _conv_width;
382 unsigned int _conv_height;
383 unsigned int _ofm;
384 const PadStrideInfo _conv_info;
385 unsigned int _num_groups;
386 ITensorAccessorUPtr _weights;
387 ITensorAccessorUPtr _bias;
388 const QuantizationInfo _weights_quant_info;
389 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000390};
391
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100392/** Deconvolution Layer */
393class DeconvolutionLayer final : public ILayer
394{
395public:
396 /** Construct a convolution layer.
397 *
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100398 * @param[in] conv_width Convolution width.
399 * @param[in] conv_height Convolution height.
400 * @param[in] ofm Output feature map.
401 * @param[in] weights Accessor to get kernel weights from.
402 * @param[in] bias Accessor to get kernel bias from.
403 * @param[in] deconv_info Padding and stride information.
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100404 */
405 DeconvolutionLayer(unsigned int conv_width,
406 unsigned int conv_height,
407 unsigned int ofm,
408 ITensorAccessorUPtr weights,
409 ITensorAccessorUPtr bias,
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100410 PadStrideInfo deconv_info)
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100411 : _conv_width(conv_width),
412 _conv_height(conv_height),
413 _ofm(ofm),
414 _deconv_info(std::move(deconv_info)),
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100415 _weights(std::move(weights)),
416 _bias(std::move(bias))
417 {
418 }
419
420 NodeID create_layer(IStream &s) override
421 {
422 NodeIdxPair input = { s.tail_node(), 0 };
423 NodeParams common_params = { name(), s.hints().target_hint };
424 return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100425 Size2D(_conv_width, _conv_height), _ofm, _deconv_info,
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100426 std::move(_weights), std::move(_bias));
427 }
428
429private:
430 unsigned int _conv_width;
431 unsigned int _conv_height;
432 unsigned int _ofm;
433 const PadStrideInfo _deconv_info;
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100434 ITensorAccessorUPtr _weights;
435 ITensorAccessorUPtr _bias;
436};
437
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000438/** Depthwise Convolution Layer */
439class DepthwiseConvolutionLayer final : public ILayer
440{
441public:
Alex Gildayc357c472018-03-21 13:54:09 +0000442 /** Construct a depthwise convolution layer.
443 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000444 * @param[in] conv_width Convolution width.
445 * @param[in] conv_height Convolution height.
446 * @param[in] weights Accessor to get kernel weights from.
447 * @param[in] bias Accessor to get kernel bias from.
448 * @param[in] conv_info Padding and stride information.
449 * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
450 * @param[in] weights_quant_info (Optional) Quantization info used for weights
451 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000452 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100453 DepthwiseConvolutionLayer(unsigned int conv_width,
454 unsigned int conv_height,
455 ITensorAccessorUPtr weights,
456 ITensorAccessorUPtr bias,
457 PadStrideInfo conv_info,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000458 int depth_multiplier = 1,
459 const QuantizationInfo weights_quant_info = QuantizationInfo(),
460 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000461 : _conv_width(conv_width),
462 _conv_height(conv_height),
463 _conv_info(std::move(conv_info)),
464 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100465 _bias(std::move(bias)),
Georgios Pinitas05045c12018-12-07 18:31:47 +0000466 _depth_multiplier(depth_multiplier),
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000467 _weights_quant_info(std::move(weights_quant_info)),
468 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000469 {
470 }
471
472 NodeID create_layer(IStream &s) override
473 {
474 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100475 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000476 return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
Georgios Pinitas05045c12018-12-07 18:31:47 +0000477 input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000478 s.hints().depthwise_convolution_method_hint,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000479 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000480 }
481
482private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100483 unsigned int _conv_width;
484 unsigned int _conv_height;
485 const PadStrideInfo _conv_info;
486 ITensorAccessorUPtr _weights;
487 ITensorAccessorUPtr _bias;
Georgios Pinitas05045c12018-12-07 18:31:47 +0000488 int _depth_multiplier;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000489 const QuantizationInfo _weights_quant_info;
490 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000491};
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000492/** Dequantization Layer */
493class DequantizationLayer final : public ILayer
494{
495public:
496 /** Construct a dequantization layer.
497 *
498 */
499 DequantizationLayer()
500 {
501 }
502
503 NodeID create_layer(IStream &s) override
504 {
505 NodeParams common_params = { name(), s.hints().target_hint };
506 NodeIdxPair input = { s.tail_node(), 0 };
507 return GraphBuilder::add_dequantization_node(s.graph(), common_params, input);
508 }
509};
510
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000511/** DetectionOutput Layer */
512class DetectionOutputLayer final : public ILayer
513{
514public:
515 /** Construct a detection output layer.
516 *
517 * @param[in] sub_stream_conf Confidence graph sub-stream.
518 * @param[in] sub_stream_prior PriorBox graph sub-stream.
519 * @param[in] detect_info DetectionOutput parameters.
520 */
Georgios Pinitasf52cd782019-03-25 14:06:14 +0000521 DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000522 : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
523 {
524 }
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000525
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000526 NodeID create_layer(IStream &s) override
527 {
528 NodeParams common_params = { name(), s.hints().target_hint };
529 NodeIdxPair input_loc = { s.tail_node(), 0 };
530 NodeIdxPair input_conf = { _ss_conf.tail_node(), 0 };
531 NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
532 return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
533 }
534
535private:
536 SubStream _ss_conf;
537 SubStream _ss_prior;
538 DetectionOutputLayerInfo _detect_info;
539};
Isabella Gottardia7acb3c2019-01-08 13:48:44 +0000540/** DetectionOutputPostProcess Layer */
541class DetectionPostProcessLayer final : public ILayer
542{
543public:
544 /** Construct a detection output layer.
545 *
546 * @param[in] sub_stream_class_prediction Class prediction graph sub-stream.
547 * @param[in] detect_info DetectionOutput parameters.
548 * @param[in] anchors Accessor to get anchors tensor data from.
549 * @param[in] out_quant_info (Optional) Output quantization info
550 */
551 DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors,
552 const QuantizationInfo out_quant_info = QuantizationInfo())
553 : _sub_stream_class_prediction(std::move(sub_stream_class_prediction)), _detect_info(detect_info), _anchors(std::move(anchors)), _out_quant_info(std::move(out_quant_info))
554 {
555 }
556
557 NodeID create_layer(IStream &s) override
558 {
559 ARM_COMPUTE_ERROR_ON(_anchors == nullptr);
560
561 NodeParams common_params = { name(), s.hints().target_hint };
562 NodeIdxPair input_box_encoding = { s.tail_node(), 0 };
563 NodeIdxPair input_class_prediction = { _sub_stream_class_prediction.tail_node(), 0 };
564 return GraphBuilder::add_detection_post_process_node(s.graph(), common_params, input_box_encoding, input_class_prediction, _detect_info, std::move(_anchors), std::move(_out_quant_info));
565 }
566
567private:
568 SubStream _sub_stream_class_prediction;
569 DetectionPostProcessLayerInfo _detect_info;
570 ITensorAccessorUPtr _anchors;
571 const QuantizationInfo _out_quant_info;
572};
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100573/** Dummy Layer */
574class DummyLayer final : public ILayer
575{
576public:
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000577 /** Construct a dummy layer.
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100578 *
579 * @param[in] shape Output shape
580 */
581 DummyLayer(TensorShape shape)
582 : _shape(shape)
583 {
584 }
585
586 NodeID create_layer(IStream &s) override
587 {
588 NodeParams common_params = { name(), s.hints().target_hint };
589 NodeIdxPair input = { s.tail_node(), 0 };
590 return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
591 }
592
593private:
594 TensorShape _shape;
595};
596
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100597class EltwiseLayer final : public ILayer
598{
599public:
600 /** Construct an element-wise operation layer
601 *
602 * @param[in] sub_stream0 First graph sub-stream
603 * @param[in] sub_stream1 First graph sub-stream
604 * @param[in] op Element-wise operation to perform
605 */
606 EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
607 : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
608 {
609 }
610
611 NodeID create_layer(IStream &s) override
612 {
613 NodeParams common_params = { name(), s.hints().target_hint };
614 NodeIdxPair input0 = { _ss0.tail_node(), 0 };
615 NodeIdxPair input1 = { _ss1.tail_node(), 0 };
616
617 return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
618 }
619
620private:
621 SubStream _ss0;
622 SubStream _ss1;
623 EltwiseOperation _op;
624};
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000625/** Flatten Layer */
626class FlattenLayer final : public ILayer
627{
628public:
Alex Gildayc357c472018-03-21 13:54:09 +0000629 /** Construct a flatten layer. */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000630 FlattenLayer()
631 {
632 }
633
634 NodeID create_layer(IStream &s) override
635 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100636 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000637 NodeIdxPair input = { s.tail_node(), 0 };
638 return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
639 }
640};
641
/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer with weights/bias supplied by accessors.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] weights            Accessor to get weights from.
     * @param[in] bias               Accessor to get bias from.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int num_outputs,
                        ITensorAccessorUPtr weights,
                        ITensorAccessorUPtr bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_ss(nullptr), // sub-stream members stay null: selects the accessor path in create_layer
          _bias_ss(nullptr),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Construct a fully connected layer with weights/bias produced by graph sub-streams.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] sub_stream_weights Graph sub-stream for the weights.
     * @param[in] sub_stream_bias    Graph sub-stream for the bias.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int num_outputs,
                        SubStream    sub_stream_weights,
                        SubStream    sub_stream_bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(nullptr), // accessor members stay null: selects the sub-stream path in create_layer
          _bias(nullptr),
          _weights_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_weights))),
          _bias_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_bias))),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * Dispatches on which constructor was used: a non-null _weights selects the
     * accessor-based builder overload, otherwise the sub-stream overload is used.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        if(_weights != nullptr)
        {
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           std::move(_weights), std::move(_bias), _fc_info,
                                                           std::move(_weights_quant_info), std::move(_out_quant_info));
        }
        else
        {
            ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);

            // Bias sub-stream is optional; EmptyNodeID signals "no bias" to the builder
            NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
            // NOTE(review): this path does not forward _weights_quant_info — presumably
            // intentional since weights come from a graph branch, but worth confirming.
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           _weights_ss->tail_node(), bias_nid, _fc_info,
                                                           std::move(_out_quant_info));
        }
    }

private:
    unsigned int                  _num_outputs;
    ITensorAccessorUPtr           _weights;    // Null when weights come from a sub-stream
    ITensorAccessorUPtr           _bias;       // Null when bias comes from a sub-stream
    std::unique_ptr<SubStream>    _weights_ss; // Null when weights come from an accessor
    std::unique_ptr<SubStream>    _bias_ss;    // Null when bias comes from an accessor
    const FullyConnectedLayerInfo _fc_info;
    const QuantizationInfo        _weights_quant_info;
    const QuantizationInfo        _out_quant_info;
};
735
Manuel Bottini5209be52019-02-13 16:34:56 +0000736/** Generate Proposals Layer */
737class GenerateProposalsLayer final : public ILayer
738{
739public:
740 /** Construct a generate proposals layer.
741 *
742 * @param[in] ss_scores Graph sub-stream for the scores.
743 * @param[in] ss_deltas Graph sub-stream for the deltas.
744 * @param[in] ss_anchors Graph sub-stream for the anchors.
745 * @param[in] info Generate Proposals operation information.
746 */
747 GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
748 : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
749 {
750 }
751
752 /** Create layer and add to the given stream.
753 *
754 * @param[in] s Stream to add layer to.
755 *
756 * @return ID of the created node.
757 */
758 NodeID create_layer(IStream &s) override
759 {
760 NodeParams common_params = { name(), s.hints().target_hint };
761 NodeIdxPair scores = { _ss_scores.tail_node(), 0 };
762 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
763 NodeIdxPair anchors = { _ss_anchors.tail_node(), 0 };
764 return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
765 }
766
767private:
768 SubStream _ss_scores;
769 SubStream _ss_deltas;
770 SubStream _ss_anchors;
771 GenerateProposalsInfo _info;
772};
773
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000774/** Normalization Layer */
775class NormalizationLayer final : public ILayer
776{
777public:
Alex Gildayc357c472018-03-21 13:54:09 +0000778 /** Construct a normalization layer.
779 *
780 * @param[in] norm_info Normalization information.
781 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000782 NormalizationLayer(NormalizationLayerInfo norm_info)
783 : _norm_info(norm_info)
784 {
785 }
786
787 NodeID create_layer(IStream &s) override
788 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100789 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000790 NodeIdxPair input = { s.tail_node(), 0 };
791 return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
792 }
793
794private:
795 NormalizationLayerInfo _norm_info;
796};
797
Michele Di Giorgio555d1102018-09-12 13:51:59 +0100798/** Normalize planar YUV Layer */
799class NormalizePlanarYUVLayer final : public ILayer
800{
801public:
802 /** Construct a normalize planar YUV layer.
803 *
804 * @param[in] mean Accessor to get mean tensor data from.
805 * @param[in] std Accessor to get std tensor data from.
806 */
807 NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
808 ITensorAccessorUPtr std)
809 : _mean(std::move(mean)), _std(std::move(std))
810 {
811 }
812
813 NodeID create_layer(IStream &s) override
814 {
815 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
816 ARM_COMPUTE_ERROR_ON(_std == nullptr);
817
818 NodeParams common_params = { name(), s.hints().target_hint };
819 NodeIdxPair input = { s.tail_node(), 0 };
820 return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
821 std::move(_mean), std::move(_std));
822 }
823
824private:
825 ITensorAccessorUPtr _mean;
826 ITensorAccessorUPtr _std;
827};
828
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100829/** Pad Layer */
830class PadLayer final : public ILayer
831{
832public:
833 /** Construct a pad layer.
834 *
835 * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
836 * specifies the front and the end padding in the i-th dimension.
837 */
838 PadLayer(PaddingList padding)
839 : _padding(padding)
840 {
841 }
842
843 NodeID create_layer(IStream &s) override
844 {
845 NodeParams common_params = { name(), s.hints().target_hint };
846 NodeIdxPair input = { s.tail_node(), 0 };
847 return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding);
848 }
849
850private:
851 PaddingList _padding;
852};
853
Georgios Pinitas57c48242018-08-02 13:41:49 +0100854/** Permute Layer */
855class PermuteLayer final : public ILayer
856{
857public:
858 /** Construct a permute layer.
859 *
860 * @param[in] perm Permutation vector.
861 * @param[in] layout (Optional) Data layout to assign to permuted tensor.
862 * If UNKNOWN then the input's layout will be used.
863 */
864 PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
865 : _perm(perm), _layout(layout)
866 {
867 }
868
869 NodeID create_layer(IStream &s) override
870 {
871 NodeParams common_params = { name(), s.hints().target_hint };
872 NodeIdxPair input = { s.tail_node(), 0 };
873 return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
874 }
875
876private:
877 PermutationVector _perm;
878 DataLayout _layout;
879};
880
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000881/** Pooling Layer */
882class PoolingLayer final : public ILayer
883{
884public:
Alex Gildayc357c472018-03-21 13:54:09 +0000885 /** Construct a pooling layer.
886 *
887 * @param[in] pool_info Pooling information.
888 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000889 PoolingLayer(PoolingLayerInfo pool_info)
890 : _pool_info(pool_info)
891 {
892 }
893
894 NodeID create_layer(IStream &s) override
895 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100896 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000897 NodeIdxPair input = { s.tail_node(), 0 };
898 return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
899 }
900
901private:
902 PoolingLayerInfo _pool_info;
903};
904
Giorgio Arena6e9d0e02020-01-03 15:02:04 +0000905/** Print Layer */
906class PrintLayer final : public ILayer
907{
908public:
909 /** Construct a print layer.
910 *
911 * Example usage to locally dequantize and print a tensor:
912 *
913 * Tensor *output = new Tensor();
914 * const auto transform = [output](ITensor *input)
915 * {
916 * output->allocator()->init(*input->info());
917 * output->info()->set_data_type(DataType::F32);
918 * output->allocator()->allocate();
919 *
920 * Window win;
921 * win.use_tensor_dimensions(input->info()->tensor_shape());
922 * Iterator in(input, win);
923 * Iterator out(output, win);
924 * execute_window_loop(win, [&](const Coordinates &)
925 * {
926 * *(reinterpret_cast<float *>(out.ptr())) = dequantize_qasymm8(*in.ptr(), input->info()->quantization_info().uniform());
927 * }, in, out);
928 *
929 * return output;
930 * };
931 *
932 * graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info), get_input_accessor(common_params, nullptr, false))
933 * << ...
934 * << \\ CNN Layers
935 * << ...
936 * << PrintLayer(std::cout, IOFormatInfo(), transform)
937 * << ...
938 * << OutputLayer(get_output_accessor(common_params, 5));
939 *
940 * @param[in] stream Output stream.
941 * @param[in] format_info (Optional) Format info.
942 * @param[in] transform (Optional) Input transform function.
943 */
944 PrintLayer(std::ostream &stream, const IOFormatInfo &format_info = IOFormatInfo(), const std::function<ITensor *(ITensor *)> transform = nullptr)
945 : _stream(stream), _format_info(format_info), _transform(transform)
946 {
947 }
948
949 NodeID create_layer(IStream &s) override
950 {
951 NodeParams common_params = { name(), s.hints().target_hint };
952 NodeIdxPair input = { s.tail_node(), 0 };
953 return GraphBuilder::add_print_node(s.graph(), common_params, input, _stream, _format_info, _transform);
954 }
955
956private:
957 std::ostream &_stream;
958 const IOFormatInfo &_format_info;
959 const std::function<ITensor *(ITensor *)> _transform;
960};
961
Pablo Tello32521432018-11-15 14:43:10 +0000962/** PriorBox Layer */
963class PriorBoxLayer final : public ILayer
964{
965public:
966 /** Construct a priorbox layer.
967 *
968 * @param[in] sub_stream First graph sub-stream
969 * @param[in] prior_info PriorBox parameters.
970 */
Georgios Pinitasf52cd782019-03-25 14:06:14 +0000971 PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
Pablo Tello32521432018-11-15 14:43:10 +0000972 : _ss(std::move(sub_stream)), _prior_info(prior_info)
973 {
974 }
975
976 NodeID create_layer(IStream &s) override
977 {
978 NodeParams common_params = { name(), s.hints().target_hint };
979 NodeIdxPair input0 = { s.tail_node(), 0 };
980 NodeIdxPair input1 = { _ss.tail_node(), 0 };
981 return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
982 }
983
984private:
985 SubStream _ss;
986 PriorBoxLayerInfo _prior_info;
987};
988
Isabella Gottardi3db1ba92019-05-17 12:35:20 +0100989/** Quantization Layer */
990class QuantizationLayer final : public ILayer
991{
992public:
993 /** Construct a quantization layer.
994 *
995 * @param[in] out_quant_info Output tensor quantization info
996 */
997 QuantizationLayer(QuantizationInfo out_quant_info)
998 : _out_quant_info(out_quant_info)
999 {
1000 }
1001
1002 NodeID create_layer(IStream &s) override
1003 {
1004 NodeParams common_params = { name(), s.hints().target_hint };
1005 NodeIdxPair input = { s.tail_node(), 0 };
1006 return GraphBuilder::add_quantization_node(s.graph(), common_params, input, _out_quant_info);
1007 }
1008
1009private:
1010 QuantizationInfo _out_quant_info;
1011};
1012
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001013/** Reorg Layer */
1014class ReorgLayer final : public ILayer
1015{
1016public:
1017 /** Construct a reorg layer.
1018 *
1019 * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
1020 * It defines the spatial distance between 2 consecutive pixels in the x and y direction
1021 */
1022 ReorgLayer(int stride)
1023 : _stride(stride)
1024 {
1025 }
1026
1027 NodeID create_layer(IStream &s) override
1028 {
1029 NodeParams common_params = { name(), s.hints().target_hint };
1030 NodeIdxPair input = { s.tail_node(), 0 };
1031 return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
1032 }
1033
1034private:
1035 int _stride;
1036};
1037
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001038/** Reshape Layer */
1039class ReshapeLayer final : public ILayer
1040{
1041public:
Alex Gildayc357c472018-03-21 13:54:09 +00001042 /** Construct a reshape layer.
1043 *
1044 * @param[in] shape Target shape.
1045 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001046 ReshapeLayer(TensorShape shape)
1047 : _shape(shape)
1048 {
1049 }
1050
1051 NodeID create_layer(IStream &s) override
1052 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +01001053 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001054 NodeIdxPair input = { s.tail_node(), 0 };
1055 return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
1056 }
1057
1058private:
1059 TensorShape _shape;
1060};
1061
/** Resize Layer */
class ResizeLayer final : public ILayer
{
public:
    /** Construct a resize layer.
     *
     * @param[in] policy       Interpolation policy to use for resizing.
     * @param[in] width_scale  Scale factor applied to the input width.
     * @param[in] height_scale Scale factor applied to the input height.
     */
    ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
        : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
    }

private:
    InterpolationPolicy _policy;
    float               _width_scale;
    float               _height_scale;
};
1083
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001084/** ROIAlign Layer */
1085class ROIAlignLayer final : public ILayer
1086{
1087public:
1088 /** Construct a RoiAlign layer.
1089 *
1090 * @param[in] sub_stream_input Graph sub-stream for the input
1091 * @param[in] sub_stream_rois Graph sub-stream for the rois
1092 * @param[in] pool_info Pooling information.
1093 */
1094 ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
1095 : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
1096 {
1097 }
1098
1099 /** Prevent instances of this class from being copy constructed */
1100 ROIAlignLayer(const ROIAlignLayer &) = delete;
1101 /** Prevent instances of this class from being copied */
1102 ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;
1103
1104 NodeID create_layer(IStream &s) override
1105 {
1106 NodeParams common_params = { name(), s.hints().target_hint };
1107 NodeIdxPair input = { _ss_input.tail_node(), 0 };
1108 NodeIdxPair rois = { _ss_rois.tail_node(), 0 };
1109 return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
1110 }
1111
1112private:
1113 SubStream _ss_input;
1114 SubStream _ss_rois;
1115 ROIPoolingLayerInfo _pool_info;
1116};
1117
Isabella Gottardi88d5b222018-04-06 12:24:55 +01001118/** Scale Layer */
1119class ScaleLayer final : public ILayer
1120{
1121public:
1122 /** Construct a scale layer.
1123 *
1124 * @param[in] mul_w Accessor to get mul weight from.
1125 * @param[in] add_w Accessor to get add weight from.
1126 */
1127 ScaleLayer(ITensorAccessorUPtr mul_w,
1128 ITensorAccessorUPtr add_w)
1129 : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
1130 {
1131 }
1132
1133 NodeID create_layer(IStream &s) override
1134 {
1135 NodeParams common_params = { name(), s.hints().target_hint };
1136 NodeIdxPair input = { s.tail_node(), 0 };
1137 return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
1138 }
1139
1140private:
1141 ITensorAccessorUPtr _mul_w;
1142 ITensorAccessorUPtr _add_w;
1143};
1144
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001145/** Slice Layer */
1146class SliceLayer final : public ILayer
1147{
1148public:
1149 /** Construct a slice layer.
1150 *
1151 * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1152 * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1153 */
1154 SliceLayer(Coordinates &starts, Coordinates &ends)
1155 : _starts(starts), _ends(ends)
1156 {
1157 }
1158
1159 NodeID create_layer(IStream &s) override
1160 {
1161 NodeParams common_params = { name(), s.hints().target_hint };
1162 NodeIdxPair input = { s.tail_node(), 0 };
1163 return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
1164 }
1165
1166private:
1167 Coordinates _starts;
1168 Coordinates _ends;
1169};
1170
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001171/** Softmax Layer */
1172class SoftmaxLayer final : public ILayer
1173{
1174public:
Alex Gildayc357c472018-03-21 13:54:09 +00001175 /** Construct a softmax layer.
1176 *
1177 * @param[in] beta (Optional) Beta value. Default 1.0.
1178 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001179 SoftmaxLayer(float beta = 1.0f)
1180 : _beta(beta)
1181 {
1182 }
1183
1184 NodeID create_layer(IStream &s) override
1185 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +01001186 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001187 NodeIdxPair input = { s.tail_node(), 0 };
1188 return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
1189 }
1190
1191private:
1192 float _beta;
1193};
Michalis Spyrou96f67692018-09-13 11:39:28 +01001194
/** Stack Layer */
class StackLayer final : public ILayer
{
public:
    /** Construct a stack layer that stacks two or more branches along axis 0.
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Append every remaining branch of the parameter pack in order.
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer that stacks two or more branches along the given axis.
     *
     * @param[in] axis             Stack layer axis along which to stack the inputs
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Append every remaining branch of the parameter pack in order.
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer with a single branch (degenerates to a pass-through).
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    /** Create layer and add to the given stream.
     *
     * With a single branch no stack node is created and the branch's tail node
     * ID is returned directly; otherwise a stack node is added over the tail
     * nodes of all non-empty, non-Output branches.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            // Single branch: pass through its tail node.
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and stack
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    // Output nodes cannot feed another node; skip them.
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    int                                     _axis;
};
1280
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001281/** Upsample Layer */
1282class UpsampleLayer final : public ILayer
1283{
1284public:
1285 /** Construct a Upsample layer.
1286 *
1287 * @param[in] info Stride info
1288 * @param[in] upsampling_policy Upsampling policy
1289 */
1290 UpsampleLayer(Size2D info, InterpolationPolicy upsampling_policy)
1291 : _info(info), _upsampling_policy(upsampling_policy)
1292 {
1293 }
1294
1295 NodeID create_layer(IStream &s) override
1296 {
1297 NodeParams common_params = { name(), s.hints().target_hint };
1298 NodeIdxPair input = { s.tail_node(), 0 };
1299 return GraphBuilder::add_upsample_node(s.graph(), common_params, input, _info, _upsampling_policy);
1300 }
1301
1302private:
1303 Size2D _info;
1304 InterpolationPolicy _upsampling_policy;
1305};
1306
Michalis Spyrou96f67692018-09-13 11:39:28 +01001307/** YOLO Layer */
1308class YOLOLayer final : public ILayer
1309{
1310public:
1311 /** Construct a YOLO layer.
1312 *
1313 * @param[in] act_info Activation info
1314 * @param[in] num_classes Number of classes to activate
1315 */
1316 YOLOLayer(ActivationLayerInfo act_info, int32_t num_classes)
1317 : _act_info(act_info), _num_classes(num_classes)
1318 {
1319 }
1320
1321 NodeID create_layer(IStream &s) override
1322 {
1323 NodeParams common_params = { name(), s.hints().target_hint };
1324 NodeIdxPair input = { s.tail_node(), 0 };
1325 return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info, _num_classes);
1326 }
1327
1328private:
1329 ActivationLayerInfo _act_info;
1330 int32_t _num_classes;
1331};
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001332} // namespace frontend
Georgios Pinitasd9eb2752018-04-03 13:44:29 +01001333} // namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001334} // namespace arm_compute
Michalis Spyrouf4643372019-11-29 16:17:13 +00001335#endif /* ARM_COMPUTE_GRAPH_LAYERS_H */