/*
 * Copyright (c) 2018-2020 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_LAYERS_H
#define ARM_COMPUTE_GRAPH_LAYERS_H

#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/frontend/ILayer.h"
#include "arm_compute/graph/frontend/IStream.h"
#include "arm_compute/graph/frontend/SubStream.h"

#include "arm_compute/core/utils/misc/Utility.h"

#include <memory>
#include <string>

namespace arm_compute
{
namespace graph
{
namespace frontend
{
/** Input Layer */
class InputLayer final : public ILayer
{
public:
    /** Construct an input layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Constant Layer */
class ConstantLayer final : public ILayer
{
public:
    /** Construct a constant layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    ConstantLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_const_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Output Layer */
class OutputLayer final : public ILayer
{
public:
    /** Construct an output layer.
     *
     * @param[in] accessor       Accessor to give output tensor data to.
     * @param[in] connection_idx (Optional) Input connection index
     */
    OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
        : _accessor(std::move(accessor)), _connection_idx(connection_idx)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), _connection_idx };
        return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
    }

private:
    ITensorAccessorUPtr _accessor;
    unsigned int        _connection_idx;
};
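
/* Example (illustrative sketch, not part of this header): layers are streamed into a graph
 * front to back. The stream, descriptor values and accessor objects below are placeholders.
 *
 * Stream graph(0, "example");
 * graph << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), DataType::F32),
 *                     std::move(input_accessor))
 *       << ... // intermediate layers
 *       << OutputLayer(std::move(output_accessor));
 */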

/** Activation Layer */
class ActivationLayer final : public ILayer
{
public:
    /** Construct an activation layer.
     *
     * @param[in] act_info       Activation information
     * @param[in] out_quant_info (Optional) Output quantization info
     */
    ActivationLayer(ActivationLayerInfo act_info,
                    const QuantizationInfo out_quant_info = QuantizationInfo())
        : _act_info(act_info),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
    }

private:
    ActivationLayerInfo    _act_info;
    const QuantizationInfo _out_quant_info;
};

/** Batch Normalization Layer */
class BatchNormalizationLayer final : public ILayer
{
public:
    /** Construct a batch normalization layer.
     *
     * @param[in] mean    Accessor to get mean tensor data from.
     * @param[in] var     Accessor to get var tensor data from.
     * @param[in] gamma   (Optional) Accessor to get gamma tensor data from. Default: nullptr.
     * @param[in] beta    (Optional) Accessor to get beta tensor data from. Default: nullptr.
     * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
     */
    BatchNormalizationLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr var,
                            ITensorAccessorUPtr gamma   = nullptr,
                            ITensorAccessorUPtr beta    = nullptr,
                            float               epsilon = 0.001f)
        : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_var == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
                                                          std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _var;
    ITensorAccessorUPtr _gamma;
    ITensorAccessorUPtr _beta;
    float               _epsilon;
};

/** Bounding Box Transform Layer */
class BoundingBoxTransformLayer final : public ILayer
{
public:
    /** Construct a bounding box transform layer.
     *
     * @param[in] sub_stream_input  Graph sub-stream for the input
     * @param[in] sub_stream_deltas Graph sub-stream for the deltas
     * @param[in] info              Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
     */
    BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
        : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
    }

private:
    SubStream                _ss_input;
    SubStream                _ss_deltas;
    BoundingBoxTransformInfo _bbox_info;
};

/** Channel Shuffle Layer */
class ChannelShuffleLayer final : public ILayer
{
public:
    /** Construct a Channel Shuffle layer.
     *
     * @param[in] num_groups Number of groups
     */
    ChannelShuffleLayer(unsigned int num_groups)
        : _num_groups(num_groups)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
    }

private:
    unsigned int _num_groups;
};

/** Concat Layer */
class ConcatLayer final : public ILayer
{
public:
    /** Construct a concatenation layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] concat_descriptor Concat layer descriptor
     * @param[in] sub_stream1       First graph branch
     * @param[in] sub_stream2       Second graph branch
     * @param[in] rest_sub_streams  Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(concat_descriptor)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer with a single sub-stream
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and concatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    descriptors::ConcatLayerDescriptor      _concat_descriptor;
};
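
/* Example (illustrative sketch): concatenating two branches along the channel dimension.
 * The branch contents and the stream object are placeholders.
 *
 * SubStream left(graph);
 * SubStream right(graph);
 * left << ...  // layers of the first branch
 * right << ... // layers of the second branch
 * graph << ConcatLayer(std::move(left), std::move(right));
 */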

/** Convolution Layer */
class ConvolutionLayer final : public ILayer
{
public:
    /** Construct a convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] ofm                Output feature map.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] num_groups         (Optional) Number of groups. Default: 1.
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    ConvolutionLayer(unsigned int           conv_width,
                     unsigned int           conv_height,
                     unsigned int           ofm,
                     ITensorAccessorUPtr    weights,
                     ITensorAccessorUPtr    bias,
                     PadStrideInfo          conv_info,
                     unsigned int           num_groups         = 1,
                     const QuantizationInfo weights_quant_info = QuantizationInfo(),
                     const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _conv_info(std::move(conv_info)),
          _num_groups(num_groups),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
                                                  Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
                                                  s.hints().convolution_method_hint, s.hints().fast_math_hint,
                                                  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    unsigned int           _ofm;
    const PadStrideInfo    _conv_info;
    unsigned int           _num_groups;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};
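
/* Example (illustrative sketch): a 3x3 convolution producing 64 output feature maps with
 * stride 1 and 1-pixel padding. The accessor objects and the layer name are placeholders.
 *
 * graph << ConvolutionLayer(3U, 3U, 64U,
 *                           std::move(weights_accessor), std::move(bias_accessor),
 *                           PadStrideInfo(1, 1, 1, 1))
 *              .set_name("conv1");
 */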

/** Deconvolution Layer */
class DeconvolutionLayer final : public ILayer
{
public:
    /** Construct a deconvolution layer.
     *
     * @param[in] conv_width  Convolution width.
     * @param[in] conv_height Convolution height.
     * @param[in] ofm         Output feature map.
     * @param[in] weights     Accessor to get kernel weights from.
     * @param[in] bias        Accessor to get kernel bias from.
     * @param[in] deconv_info Padding and stride information.
     */
    DeconvolutionLayer(unsigned int        conv_width,
                       unsigned int        conv_height,
                       unsigned int        ofm,
                       ITensorAccessorUPtr weights,
                       ITensorAccessorUPtr bias,
                       PadStrideInfo       deconv_info)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _deconv_info(std::move(deconv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
                                                    Size2D(_conv_width, _conv_height), _ofm, _deconv_info,
                                                    std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    unsigned int        _ofm;
    const PadStrideInfo _deconv_info;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};

/** Depthwise Convolution Layer */
class DepthwiseConvolutionLayer final : public ILayer
{
public:
    /** Construct a depthwise convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] depth_multiplier   (Optional) Depth multiplier parameter.
     * @param[in] weights_quant_info (Optional) Quantization info used for weights
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    DepthwiseConvolutionLayer(unsigned int           conv_width,
                              unsigned int           conv_height,
                              ITensorAccessorUPtr    weights,
                              ITensorAccessorUPtr    bias,
                              PadStrideInfo          conv_info,
                              int                    depth_multiplier   = 1,
                              const QuantizationInfo weights_quant_info = QuantizationInfo(),
                              const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _conv_info(std::move(conv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _depth_multiplier(depth_multiplier),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
                                                            input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
                                                            s.hints().depthwise_convolution_method_hint,
                                                            std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    const PadStrideInfo    _conv_info;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    int                    _depth_multiplier;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};
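
/* Example (illustrative sketch): a depthwise-separable block, i.e. a 3x3 depthwise
 * convolution followed by a 1x1 pointwise convolution. The accessors are placeholders.
 *
 * graph << DepthwiseConvolutionLayer(3U, 3U,
 *                                    std::move(dw_weights_accessor), std::move(dw_bias_accessor),
 *                                    PadStrideInfo(1, 1, 1, 1))
 *       << ConvolutionLayer(1U, 1U, 128U,
 *                           std::move(pw_weights_accessor), std::move(pw_bias_accessor),
 *                           PadStrideInfo(1, 1, 0, 0));
 */
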
/** Dequantization Layer */
class DequantizationLayer final : public ILayer
{
public:
    /** Construct a dequantization layer. */
    DequantizationLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dequantization_node(s.graph(), common_params, input);
    }
};

/** DetectionOutput Layer */
class DetectionOutputLayer final : public ILayer
{
public:
    /** Construct a detection output layer.
     *
     * @param[in] sub_stream_conf  Confidence graph sub-stream.
     * @param[in] sub_stream_prior PriorBox graph sub-stream.
     * @param[in] detect_info      DetectionOutput parameters.
     */
    DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
        : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params  = { name(), s.hints().target_hint };
        NodeIdxPair input_loc      = { s.tail_node(), 0 };
        NodeIdxPair input_conf     = { _ss_conf.tail_node(), 0 };
        NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
        return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
    }

private:
    SubStream                _ss_conf;
    SubStream                _ss_prior;
    DetectionOutputLayerInfo _detect_info;
};
/** DetectionOutputPostProcess Layer */
class DetectionPostProcessLayer final : public ILayer
{
public:
    /** Construct a detection post-process layer.
     *
     * @param[in] sub_stream_class_prediction Class prediction graph sub-stream.
     * @param[in] detect_info                 DetectionOutput parameters.
     * @param[in] anchors                     Accessor to get anchors tensor data from.
     * @param[in] out_quant_info              (Optional) Output quantization info
     */
    DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors,
                              const QuantizationInfo out_quant_info = QuantizationInfo())
        : _sub_stream_class_prediction(std::move(sub_stream_class_prediction)), _detect_info(detect_info), _anchors(std::move(anchors)), _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_anchors == nullptr);

        NodeParams  common_params          = { name(), s.hints().target_hint };
        NodeIdxPair input_box_encoding     = { s.tail_node(), 0 };
        NodeIdxPair input_class_prediction = { _sub_stream_class_prediction.tail_node(), 0 };
        return GraphBuilder::add_detection_post_process_node(s.graph(), common_params, input_box_encoding, input_class_prediction, _detect_info, std::move(_anchors), std::move(_out_quant_info));
    }

private:
    SubStream                     _sub_stream_class_prediction;
    DetectionPostProcessLayerInfo _detect_info;
    ITensorAccessorUPtr           _anchors;
    const QuantizationInfo        _out_quant_info;
};
/** Dummy Layer */
class DummyLayer final : public ILayer
{
public:
    /** Construct a dummy layer.
     *
     * @param[in] shape Output shape
     */
    DummyLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Eltwise Layer */
class EltwiseLayer final : public ILayer
{
public:
    /** Construct an element-wise operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     * @param[in] op          Element-wise operation to perform
     */
    EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { _ss0.tail_node(), 0 };
        NodeIdxPair input1        = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
    }

private:
    SubStream        _ss0;
    SubStream        _ss1;
    EltwiseOperation _op;
};
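
/* Example (illustrative sketch): a residual connection that adds the outputs of two
 * branches element-wise. The branch contents are placeholders.
 *
 * SubStream identity(graph);
 * SubStream residual(graph);
 * residual << ... // convolution branch
 * graph << EltwiseLayer(std::move(residual), std::move(identity), EltwiseOperation::Add);
 */
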
/** Flatten Layer */
class FlattenLayer final : public ILayer
{
public:
    /** Construct a flatten layer. */
    FlattenLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
    }
};

/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] weights            Accessor to get weights from.
     * @param[in] bias               Accessor to get bias from.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        ITensorAccessorUPtr           weights,
                        ITensorAccessorUPtr           bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_ss(nullptr),
          _bias_ss(nullptr),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] sub_stream_weights Graph sub-stream for the weights.
     * @param[in] sub_stream_bias    Graph sub-stream for the bias.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        SubStream                     sub_stream_weights,
                        SubStream                     sub_stream_bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(nullptr),
          _bias(nullptr),
          _weights_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_weights))),
          _bias_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_bias))),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        if(_weights != nullptr)
        {
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           std::move(_weights), std::move(_bias), _fc_info,
                                                           std::move(_weights_quant_info), std::move(_out_quant_info));
        }
        else
        {
            ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);

            NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           _weights_ss->tail_node(), bias_nid, _fc_info,
                                                           std::move(_out_quant_info));
        }
    }

private:
    unsigned int                  _num_outputs;
    ITensorAccessorUPtr           _weights;
    ITensorAccessorUPtr           _bias;
    std::unique_ptr<SubStream>    _weights_ss;
    std::unique_ptr<SubStream>    _bias_ss;
    const FullyConnectedLayerInfo _fc_info;
    const QuantizationInfo        _weights_quant_info;
    const QuantizationInfo        _out_quant_info;
};
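
/* Example (illustrative sketch): a classifier head that flattens the previous output and
 * appends a 1000-output fully connected layer followed by a softmax. Accessors are placeholders.
 *
 * graph << FlattenLayer()
 *       << FullyConnectedLayer(1000U, std::move(fc_weights_accessor), std::move(fc_bias_accessor))
 *       << SoftmaxLayer();
 */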

/** Generate Proposals Layer */
class GenerateProposalsLayer final : public ILayer
{
public:
    /** Construct a generate proposals layer.
     *
     * @param[in] ss_scores  Graph sub-stream for the scores.
     * @param[in] ss_deltas  Graph sub-stream for the deltas.
     * @param[in] ss_anchors Graph sub-stream for the anchors.
     * @param[in] info       Generate Proposals operation information.
     */
    GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
        : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair scores        = { _ss_scores.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        NodeIdxPair anchors       = { _ss_anchors.tail_node(), 0 };
        return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
    }

private:
    SubStream             _ss_scores;
    SubStream             _ss_deltas;
    SubStream             _ss_anchors;
    GenerateProposalsInfo _info;
};

/** Normalization Layer */
class NormalizationLayer final : public ILayer
{
public:
    /** Construct a normalization layer.
     *
     * @param[in] norm_info Normalization information.
     */
    NormalizationLayer(NormalizationLayerInfo norm_info)
        : _norm_info(norm_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
    }

private:
    NormalizationLayerInfo _norm_info;
};

/** Normalize planar YUV Layer */
class NormalizePlanarYUVLayer final : public ILayer
{
public:
    /** Construct a normalize planar YUV layer.
     *
     * @param[in] mean Accessor to get mean tensor data from.
     * @param[in] std  Accessor to get std tensor data from.
     */
    NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr std)
        : _mean(std::move(mean)), _std(std::move(std))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_std == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
                                                           std::move(_mean), std::move(_std));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _std;
};

/** Pad Layer */
class PadLayer final : public ILayer
{
public:
    /** Construct a pad layer.
     *
     * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
     *                    specifies the front and the end padding in the i-th dimension.
     */
    PadLayer(PaddingList padding)
        : _padding(padding)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding);
    }

private:
    PaddingList _padding;
};

/** Permute Layer */
class PermuteLayer final : public ILayer
{
public:
    /** Construct a permute layer.
     *
     * @param[in] perm   Permutation vector.
     * @param[in] layout (Optional) Data layout to assign to permuted tensor.
     *                   If UNKNOWN then the input's layout will be used.
     */
    PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
        : _perm(perm), _layout(layout)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
    }

private:
    PermutationVector _perm;
    DataLayout        _layout;
};

/** Pooling Layer */
class PoolingLayer final : public ILayer
{
public:
    /** Construct a pooling layer.
     *
     * @param[in] pool_info Pooling information.
     */
    PoolingLayer(PoolingLayerInfo pool_info)
        : _pool_info(pool_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
    }

private:
    PoolingLayerInfo _pool_info;
};

/** PRelu Layer */
class PReluLayer final : public ILayer
{
public:
    /** Construct a PRelu operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     */
    PReluLayer(SubStream &&sub_stream0, SubStream &&sub_stream1)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss0.tail_node(), 0 };
        NodeIdxPair alpha         = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_prelu_node(s.graph(), common_params, input, alpha);
    }

private:
    SubStream _ss0;
    SubStream _ss1;
};

/** Print Layer */
class PrintLayer final : public ILayer
{
public:
    /** Construct a print layer.
     *
     * Example usage to locally dequantize and print a tensor:
     *
     * Tensor *output = new Tensor();
     * const auto transform = [output](ITensor *input)
     * {
     *     output->allocator()->init(*input->info());
     *     output->info()->set_data_type(DataType::F32);
     *     output->allocator()->allocate();
     *
     *     Window win;
     *     win.use_tensor_dimensions(input->info()->tensor_shape());
     *     Iterator in(input, win);
     *     Iterator out(output, win);
     *     execute_window_loop(win, [&](const Coordinates &)
     *     {
     *         *(reinterpret_cast<float *>(out.ptr())) = dequantize_qasymm8(*in.ptr(), input->info()->quantization_info().uniform());
     *     }, in, out);
     *
     *     return output;
     * };
     *
     * graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info), get_input_accessor(common_params, nullptr, false))
     *       << ...
     *       << // CNN Layers
     *       << ...
     *       << PrintLayer(std::cout, IOFormatInfo(), transform)
     *       << ...
     *       << OutputLayer(get_output_accessor(common_params, 5));
     *
     * @param[in] stream      Output stream.
     * @param[in] format_info (Optional) Format info.
     * @param[in] transform   (Optional) Input transform function.
     */
    PrintLayer(std::ostream &stream, const IOFormatInfo &format_info = IOFormatInfo(), const std::function<ITensor *(ITensor *)> transform = nullptr)
        : _stream(stream), _format_info(format_info), _transform(transform)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_print_node(s.graph(), common_params, input, _stream, _format_info, _transform);
    }

private:
    std::ostream                              &_stream;
    const IOFormatInfo                        &_format_info;
    const std::function<ITensor *(ITensor *)> _transform;
};

/** PriorBox Layer */
class PriorBoxLayer final : public ILayer
{
public:
    /** Construct a priorbox layer.
     *
     * @param[in] sub_stream First graph sub-stream
     * @param[in] prior_info PriorBox parameters.
     */
    PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
        : _ss(std::move(sub_stream)), _prior_info(prior_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { s.tail_node(), 0 };
        NodeIdxPair input1        = { _ss.tail_node(), 0 };
        return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
    }

private:
    SubStream         _ss;
    PriorBoxLayerInfo _prior_info;
};

/** Quantization Layer */
class QuantizationLayer final : public ILayer
{
public:
    /** Construct a quantization layer.
     *
     * @param[in] out_quant_info Output tensor quantization info
     */
    QuantizationLayer(QuantizationInfo out_quant_info)
        : _out_quant_info(out_quant_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_quantization_node(s.graph(), common_params, input, _out_quant_info);
    }

private:
    QuantizationInfo _out_quant_info;
};
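
/* Example (illustrative sketch): quantizing the previous output with an explicit
 * scale/offset pair. The scale and offset values are placeholders.
 *
 * graph << QuantizationLayer(QuantizationInfo(0.0078125f, 128));
 */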

/** Reorg Layer */
class ReorgLayer final : public ILayer
{
public:
    /** Construct a reorg layer.
     *
     * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
     *                   It defines the spatial distance between 2 consecutive pixels in the x and y direction
     */
    ReorgLayer(int stride)
        : _stride(stride)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
    }

private:
    int _stride;
};

/** Reshape Layer */
class ReshapeLayer final : public ILayer
{
public:
    /** Construct a reshape layer.
     *
     * @param[in] shape Target shape.
     */
    ReshapeLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Resize Layer */
class ResizeLayer final : public ILayer
{
public:
    /** Construct a resize layer.
     *
     * @param[in] policy       Interpolation policy.
     * @param[in] width_scale  Width scaling factor.
     * @param[in] height_scale Height scaling factor.
     */
    ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
        : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
    }

private:
    InterpolationPolicy _policy;
    float               _width_scale;
    float               _height_scale;
};

/** ROIAlign Layer */
class ROIAlignLayer final : public ILayer
{
public:
    /** Construct a RoiAlign layer.
     *
     * @param[in] sub_stream_input Graph sub-stream for the input
     * @param[in] sub_stream_rois  Graph sub-stream for the rois
     * @param[in] pool_info        Pooling information.
     */
    ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
        : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
    {
    }

    /** Prevent instances of this class from being copy constructed */
    ROIAlignLayer(const ROIAlignLayer &) = delete;
    /** Prevent instances of this class from being copied */
    ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair rois          = { _ss_rois.tail_node(), 0 };
        return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
    }

private:
    SubStream           _ss_input;
    SubStream           _ss_rois;
    ROIPoolingLayerInfo _pool_info;
};

/** Scale Layer */
class ScaleLayer final : public ILayer
{
public:
    /** Construct a scale layer.
     *
     * @param[in] mul_w Accessor to get mul weight from.
     * @param[in] add_w Accessor to get add weight from.
     */
    ScaleLayer(ITensorAccessorUPtr mul_w,
               ITensorAccessorUPtr add_w)
        : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
    }

private:
    ITensorAccessorUPtr _mul_w;
    ITensorAccessorUPtr _add_w;
};

/** Slice Layer */
class SliceLayer final : public ILayer
{
public:
    /** Construct a slice layer.
     *
     * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be equal to rank(input).
     * @param[in] ends   The ends of the dimensions of the input tensor to be sliced. The length must be equal to rank(input).
     */
    SliceLayer(Coordinates &starts, Coordinates &ends)
        : _starts(starts), _ends(ends)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
    }

private:
    Coordinates _starts;
    Coordinates _ends;
};
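
/* Example (illustrative sketch): slicing the first 100 elements along the first dimension
 * of a rank-3 tensor. The coordinate values are placeholders and must match the input rank.
 *
 * Coordinates starts(0, 0, 0);
 * Coordinates ends(100, 56, 56);
 * graph << SliceLayer(starts, ends);
 */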

/** Softmax Layer */
class SoftmaxLayer final : public ILayer
{
public:
    /** Construct a softmax layer.
     *
     * @param[in] beta (Optional) Beta value. Default 1.0.
     */
    SoftmaxLayer(float beta = 1.0f)
        : _beta(beta)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
    }

private:
    float _beta;
};

/** Stack Layer */
class StackLayer final : public ILayer
{
public:
    /** Construct a stack layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer
     *
     * @param[in] axis             Stack layer axis along which to stack the inputs
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer with a single sub-stream
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and stack
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    int                                     _axis;
};
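
/* Example (illustrative sketch): stacking two branches of identical shape along a new
 * axis 0. The branch contents are placeholders.
 *
 * SubStream branch0(graph);
 * SubStream branch1(graph);
 * branch0 << ... // layers of the first branch
 * branch1 << ... // layers of the second branch
 * graph << StackLayer(0, std::move(branch0), std::move(branch1));
 */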

/** Upsample Layer */
class UpsampleLayer final : public ILayer
{
public:
    /** Construct an Upsample layer.
     *
     * @param[in] info              Stride info
     * @param[in] upsampling_policy Upsampling policy
     */
    UpsampleLayer(Size2D info, InterpolationPolicy upsampling_policy)
        : _info(info), _upsampling_policy(upsampling_policy)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_upsample_node(s.graph(), common_params, input, _info, _upsampling_policy);
    }

private:
    Size2D              _info;
    InterpolationPolicy _upsampling_policy;
};

/** YOLO Layer */
class YOLOLayer final : public ILayer
{
public:
    /** Construct a YOLO layer.
     *
     * @param[in] act_info    Activation info
     * @param[in] num_classes Number of classes to activate
     */
    YOLOLayer(ActivationLayerInfo act_info, int32_t num_classes)
        : _act_info(act_info), _num_classes(num_classes)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info, _num_classes);
    }

private:
    ActivationLayerInfo _act_info;
    int32_t             _num_classes;
};
} // namespace frontend
} // namespace graph
} // namespace arm_compute
#endif /* ARM_COMPUTE_GRAPH_LAYERS_H */