/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_GRAPH_LAYERS_H__
#define __ARM_COMPUTE_GRAPH_LAYERS_H__

#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/frontend/ILayer.h"
#include "arm_compute/graph/frontend/IStream.h"
#include "arm_compute/graph/frontend/SubStream.h"

#include "arm_compute/core/utils/misc/Utility.h"

#include <memory>
#include <string>

namespace arm_compute
{
namespace graph
{
namespace frontend
{
/** Input Layer */
class InputLayer final : public ILayer
{
public:
    /** Construct an input layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Output Layer */
class OutputLayer final : public ILayer
{
public:
    /** Construct an output layer.
     *
     * @param[in] accessor Accessor to give output tensor data to.
     */
    OutputLayer(ITensorAccessorUPtr accessor)
        : _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
    }

private:
    ITensorAccessorUPtr _accessor;
};
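
/* Usage sketch (illustrative, not part of the API): layers are normally appended to a frontend
 * Stream via operator<<. Assuming a Stream named "graph" and hypothetical accessor helpers that
 * return ITensorAccessorUPtr objects, a minimal graph could look like:
 *
 *   Stream graph(0, "example");
 *   graph << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), DataType::F32),
 *                       make_input_accessor())    // hypothetical helper
 *         << OutputLayer(make_output_accessor()); // hypothetical helper
 */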

/** Activation Layer */
class ActivationLayer final : public ILayer
{
public:
    /** Construct an activation layer.
     *
     * @param[in] act_info Activation information
     */
    ActivationLayer(ActivationLayerInfo act_info)
        : _act_info(act_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info);
    }

private:
    ActivationLayerInfo _act_info;
};
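
/* Usage sketch (illustrative): an activation is appended to the current tail of the stream, e.g.
 *
 *   graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
 */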

/** Batch Normalization Layer */
class BatchNormalizationLayer final : public ILayer
{
public:
    /** Construct a batch normalization layer.
     *
     * @param[in] mean    Accessor to get mean tensor data from.
     * @param[in] var     Accessor to get var tensor data from.
     * @param[in] gamma   (Optional) Accessor to get gamma tensor data from. Default: nullptr.
     * @param[in] beta    (Optional) Accessor to get beta tensor data from. Default: nullptr.
     * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
     */
    BatchNormalizationLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr var,
                            ITensorAccessorUPtr gamma   = nullptr,
                            ITensorAccessorUPtr beta    = nullptr,
                            float               epsilon = 0.001f)
        : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_var == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
                                                          std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _var;
    ITensorAccessorUPtr _gamma;
    ITensorAccessorUPtr _beta;
    float               _epsilon;
};
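
/* Usage sketch (illustrative): the statistics are supplied through tensor accessors; gamma and
 * beta are optional. With a hypothetical helper make_npy_accessor():
 *
 *   graph << BatchNormalizationLayer(make_npy_accessor("bn_mean.npy"),
 *                                    make_npy_accessor("bn_var.npy"),
 *                                    make_npy_accessor("bn_gamma.npy"),
 *                                    make_npy_accessor("bn_beta.npy"),
 *                                    0.001f);
 */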

/** Bounding Box Transform Layer */
class BoundingBoxTransformLayer final : public ILayer
{
public:
    /** Construct a bounding box transform layer.
     *
     * @param[in] sub_stream_input  Graph sub-stream for the input
     * @param[in] sub_stream_deltas Graph sub-stream for the deltas
     * @param[in] info              Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
     */
    BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
        : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
    }

private:
    SubStream                _ss_input;
    SubStream                _ss_deltas;
    BoundingBoxTransformInfo _bbox_info;
};

/** Channel Shuffle Layer */
class ChannelShuffleLayer final : public ILayer
{
public:
    /** Construct a Channel Shuffle layer.
     *
     * @param[in] num_groups Number of groups
     */
    ChannelShuffleLayer(unsigned int num_groups)
        : _num_groups(num_groups)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
    }

private:
    unsigned int _num_groups;
};

/** Concat Layer */
class ConcatLayer final : public ILayer
{
public:
    /** Construct a concatenation layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] axis             Axis along which the concatenation will be performed
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(DataLayoutDimension axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and concatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    DataLayoutDimension                     _axis;
};
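
/* Usage sketch (illustrative): parallel branches are built with SubStream objects that fork from
 * the main Stream "graph" and are then merged back, e.g.
 *
 *   SubStream branch_1(graph);
 *   SubStream branch_2(graph);
 *   branch_1 << ConvolutionLayer(...);   // elided arguments
 *   branch_2 << PoolingLayer(...);       // elided arguments
 *   graph << ConcatLayer(std::move(branch_1), std::move(branch_2));
 */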

/** Convolution Layer */
class ConvolutionLayer final : public ILayer
{
public:
    /** Construct a convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] ofm                Output feature map.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] num_groups         (Optional) Number of groups. Default: 1.
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    ConvolutionLayer(unsigned int           conv_width,
                     unsigned int           conv_height,
                     unsigned int           ofm,
                     ITensorAccessorUPtr    weights,
                     ITensorAccessorUPtr    bias,
                     PadStrideInfo          conv_info,
                     unsigned int           num_groups         = 1,
                     const QuantizationInfo weights_quant_info = QuantizationInfo(),
                     const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _conv_info(std::move(conv_info)),
          _num_groups(num_groups),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
                                                  Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
                                                  s.hints().convolution_method_hint, s.hints().fast_math_hint,
                                                  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    unsigned int           _ofm;
    const PadStrideInfo    _conv_info;
    unsigned int           _num_groups;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};
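
/* Usage sketch (illustrative): a 3x3 convolution producing 64 output feature maps with stride 1
 * and padding 1, loading weights and biases through hypothetical accessor helpers:
 *
 *   graph << ConvolutionLayer(3U, 3U, 64U,
 *                             make_npy_accessor("conv1_w.npy"), make_npy_accessor("conv1_b.npy"),
 *                             PadStrideInfo(1, 1, 1, 1));
 */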

/** Deconvolution Layer */
class DeconvolutionLayer final : public ILayer
{
public:
    /** Construct a deconvolution layer.
     *
     * @param[in] conv_width   Convolution width.
     * @param[in] conv_height  Convolution height.
     * @param[in] ofm          Output feature map.
     * @param[in] weights      Accessor to get kernel weights from.
     * @param[in] bias         Accessor to get kernel bias from.
     * @param[in] deconv_info  Padding and stride information.
     * @param[in] inner_border Inner border padding (right, top)
     */
    DeconvolutionLayer(unsigned int        conv_width,
                       unsigned int        conv_height,
                       unsigned int        ofm,
                       ITensorAccessorUPtr weights,
                       ITensorAccessorUPtr bias,
                       PadStrideInfo       deconv_info,
                       Size2D              inner_border)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _deconv_info(std::move(deconv_info)),
          _inner_border(inner_border),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
                                                    Size2D(_conv_width, _conv_height), _ofm, _deconv_info, _inner_border,
                                                    std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    unsigned int        _ofm;
    const PadStrideInfo _deconv_info;
    Size2D              _inner_border;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};

/** Depthwise Convolution Layer */
class DepthwiseConvolutionLayer final : public ILayer
{
public:
    /** Construct a depthwise convolution layer.
     *
     * @param[in] conv_width       Convolution width.
     * @param[in] conv_height      Convolution height.
     * @param[in] weights          Accessor to get kernel weights from.
     * @param[in] bias             Accessor to get kernel bias from.
     * @param[in] conv_info        Padding and stride information.
     * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
     * @param[in] quant_info       (Optional) Quantization info used for weights
     */
    DepthwiseConvolutionLayer(unsigned int           conv_width,
                              unsigned int           conv_height,
                              ITensorAccessorUPtr    weights,
                              ITensorAccessorUPtr    bias,
                              PadStrideInfo          conv_info,
                              int                    depth_multiplier = 1,
                              const QuantizationInfo quant_info       = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _conv_info(std::move(conv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _depth_multiplier(depth_multiplier),
          _quant_info(std::move(quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
                                                            input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
                                                            s.hints().depthwise_convolution_method_hint,
                                                            std::move(_weights), std::move(_bias), std::move(_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    const PadStrideInfo    _conv_info;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    int                    _depth_multiplier;
    const QuantizationInfo _quant_info;
};
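
/* Usage sketch (illustrative): a 3x3 depthwise convolution with stride 1 and padding 1, using
 * hypothetical accessor helpers for the weights and bias:
 *
 *   graph << DepthwiseConvolutionLayer(3U, 3U,
 *                                      make_npy_accessor("dwc_w.npy"), make_npy_accessor("dwc_b.npy"),
 *                                      PadStrideInfo(1, 1, 1, 1));
 */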

/** DetectionOutput Layer */
class DetectionOutputLayer final : public ILayer
{
public:
    /** Construct a detection output layer.
     *
     * @param[in] sub_stream_conf  Confidence graph sub-stream.
     * @param[in] sub_stream_prior PriorBox graph sub-stream.
     * @param[in] detect_info      DetectionOutput parameters.
     */
    DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, DetectionOutputLayerInfo detect_info)
        : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params  = { name(), s.hints().target_hint };
        NodeIdxPair input_loc      = { s.tail_node(), 0 };
        NodeIdxPair input_conf     = { _ss_conf.tail_node(), 0 };
        NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
        return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
    }

private:
    SubStream                _ss_conf;
    SubStream                _ss_prior;
    DetectionOutputLayerInfo _detect_info;
};

/** Dummy Layer */
class DummyLayer final : public ILayer
{
public:
    /** Construct a dummy layer.
     *
     * @param[in] shape Output shape
     */
    DummyLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Eltwise Layer */
class EltwiseLayer final : public ILayer
{
public:
    /** Construct an element-wise operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     * @param[in] op          Element-wise operation to perform
     */
    EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { _ss0.tail_node(), 0 };
        NodeIdxPair input1        = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
    }

private:
    SubStream        _ss0;
    SubStream        _ss1;
    EltwiseOperation _op;
};
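
/* Usage sketch (illustrative): element-wise layers combine the tails of two sub-streams, e.g. a
 * residual connection that adds a branch back onto the main path:
 *
 *   SubStream identity(graph);
 *   SubStream residual(graph);
 *   residual << ConvolutionLayer(...);   // elided arguments
 *   graph << EltwiseLayer(std::move(identity), std::move(residual), EltwiseOperation::Add);
 */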

/** Flatten Layer */
class FlattenLayer final : public ILayer
{
public:
    /** Construct a flatten layer. */
    FlattenLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
    }
};

/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] weights            Accessor to get weights from.
     * @param[in] bias               Accessor to get bias from.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        ITensorAccessorUPtr           weights,
                        ITensorAccessorUPtr           bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                       std::move(_weights), std::move(_bias), _fc_info,
                                                       std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int                  _num_outputs;
    ITensorAccessorUPtr           _weights;
    ITensorAccessorUPtr           _bias;
    const FullyConnectedLayerInfo _fc_info;
    const QuantizationInfo        _weights_quant_info;
    const QuantizationInfo        _out_quant_info;
};
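
/* Usage sketch (illustrative): a classifier head that flattens the feature maps and maps them to
 * 1000 outputs, again using hypothetical accessor helpers:
 *
 *   graph << FlattenLayer()
 *         << FullyConnectedLayer(1000U, make_npy_accessor("fc_w.npy"), make_npy_accessor("fc_b.npy"));
 */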

/** Generate Proposals Layer */
class GenerateProposalsLayer final : public ILayer
{
public:
    /** Construct a generate proposals layer.
     *
     * @param[in] ss_scores  Graph sub-stream for the scores.
     * @param[in] ss_deltas  Graph sub-stream for the deltas.
     * @param[in] ss_anchors Graph sub-stream for the anchors.
     * @param[in] info       Generate Proposals operation information.
     */
    GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
        : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair scores        = { _ss_scores.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        NodeIdxPair anchors       = { _ss_anchors.tail_node(), 0 };
        return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
    }

private:
    SubStream             _ss_scores;
    SubStream             _ss_deltas;
    SubStream             _ss_anchors;
    GenerateProposalsInfo _info;
};

/** Normalization Layer */
class NormalizationLayer final : public ILayer
{
public:
    /** Construct a normalization layer.
     *
     * @param[in] norm_info Normalization information.
     */
    NormalizationLayer(NormalizationLayerInfo norm_info)
        : _norm_info(norm_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
    }

private:
    NormalizationLayerInfo _norm_info;
};

/** Normalize planar YUV Layer */
class NormalizePlanarYUVLayer final : public ILayer
{
public:
    /** Construct a normalize planar YUV layer.
     *
     * @param[in] mean Accessor to get mean tensor data from.
     * @param[in] std  Accessor to get std tensor data from.
     */
    NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr std)
        : _mean(std::move(mean)), _std(std::move(std))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_std == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
                                                           std::move(_mean), std::move(_std));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _std;
};

/** Pad Layer */
class PadLayer final : public ILayer
{
public:
    /** Construct a pad layer.
     *
     * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
     *                    specifies the front and the end padding in the i-th dimension.
     */
    PadLayer(PaddingList padding)
        : _padding(padding)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding);
    }

private:
    PaddingList _padding;
};

/** Permute Layer */
class PermuteLayer final : public ILayer
{
public:
    /** Construct a permute layer.
     *
     * @param[in] perm   Permutation vector.
     * @param[in] layout (Optional) Data layout to assign to permuted tensor.
     *                   If UNKNOWN then the input's layout will be used.
     */
    PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
        : _perm(perm), _layout(layout)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
    }

private:
    PermutationVector _perm;
    DataLayout        _layout;
};

/** Pooling Layer */
class PoolingLayer final : public ILayer
{
public:
    /** Construct a pooling layer.
     *
     * @param[in] pool_info Pooling information.
     */
    PoolingLayer(PoolingLayerInfo pool_info)
        : _pool_info(pool_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
    }

private:
    PoolingLayerInfo _pool_info;
};
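
/* Usage sketch (illustrative, assuming a PoolingLayerInfo(PoolingType, pool_size, PadStrideInfo)
 * constructor): 3x3 max pooling with stride 2 and no padding, appended to the stream tail:
 *
 *   graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)));
 */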

/** PriorBox Layer */
class PriorBoxLayer final : public ILayer
{
public:
    /** Construct a priorbox layer.
     *
     * @param[in] sub_stream First graph sub-stream
     * @param[in] prior_info PriorBox parameters.
     */
    PriorBoxLayer(SubStream &&sub_stream, PriorBoxLayerInfo prior_info)
        : _ss(std::move(sub_stream)), _prior_info(prior_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { s.tail_node(), 0 };
        NodeIdxPair input1        = { _ss.tail_node(), 0 };
        return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
    }

private:
    SubStream         _ss;
    PriorBoxLayerInfo _prior_info;
};

/** Reorg Layer */
class ReorgLayer final : public ILayer
{
public:
    /** Construct a reorg layer.
     *
     * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
     *                   It defines the spatial distance between 2 consecutive pixels in the x and y direction
     */
    ReorgLayer(int stride)
        : _stride(stride)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
    }

private:
    int _stride;
};

/** Reshape Layer */
class ReshapeLayer final : public ILayer
{
public:
    /** Construct a reshape layer.
     *
     * @param[in] shape Target shape.
     */
    ReshapeLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Resize Layer */
class ResizeLayer final : public ILayer
{
public:
    /** Construct a resize layer.
     *
     * @param[in] policy       Interpolation policy.
     * @param[in] width_scale  Width scaling factor.
     * @param[in] height_scale Height scaling factor.
     */
    ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
        : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
    }

private:
    InterpolationPolicy _policy;
    float               _width_scale;
    float               _height_scale;
};

/** ROIAlign Layer */
class ROIAlignLayer final : public ILayer
{
public:
    /** Construct a ROIAlign layer.
     *
     * @param[in] sub_stream_input Graph sub-stream for the input
     * @param[in] sub_stream_rois  Graph sub-stream for the rois
     * @param[in] pool_info        Pooling information.
     */
    ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
        : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
    {
    }

    /** Prevent instances of this class from being copy constructed */
    ROIAlignLayer(const ROIAlignLayer &) = delete;
    /** Prevent instances of this class from being copied */
    ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair rois          = { _ss_rois.tail_node(), 0 };
        return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
    }

private:
    SubStream           _ss_input;
    SubStream           _ss_rois;
    ROIPoolingLayerInfo _pool_info;
};

/** Scale Layer */
class ScaleLayer final : public ILayer
{
public:
    /** Construct a scale layer.
     *
     * @param[in] mul_w Accessor to get mul weight from.
     * @param[in] add_w Accessor to get add weight from.
     */
    ScaleLayer(ITensorAccessorUPtr mul_w,
               ITensorAccessorUPtr add_w)
        : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
    }

private:
    ITensorAccessorUPtr _mul_w;
    ITensorAccessorUPtr _add_w;
};

/** Slice Layer */
class SliceLayer final : public ILayer
{
public:
    /** Construct a slice layer.
     *
     * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     * @param[in] ends   The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     */
    SliceLayer(Coordinates &starts, Coordinates &ends)
        : _starts(starts), _ends(ends)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
    }

private:
    Coordinates _starts;
    Coordinates _ends;
};

/** Softmax Layer */
class SoftmaxLayer final : public ILayer
{
public:
    /** Construct a softmax layer.
     *
     * @param[in] beta (Optional) Beta value. Default 1.0.
     */
    SoftmaxLayer(float beta = 1.0f)
        : _beta(beta)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
    }

private:
    float _beta;
};
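
/* Usage sketch (illustrative): a softmax is typically the last computational layer before the
 * output accessor, e.g.
 *
 *   graph << SoftmaxLayer()
 *         << OutputLayer(make_output_accessor());   // hypothetical helper
 */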

/** Upsample Layer */
class UpsampleLayer final : public ILayer
{
public:
    /** Construct an Upsample layer.
     *
     * @param[in] info              Stride info
     * @param[in] upsampling_policy Upsampling policy
     */
    UpsampleLayer(Size2D info, InterpolationPolicy upsampling_policy)
        : _info(info), _upsampling_policy(upsampling_policy)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_upsample_node(s.graph(), common_params, input, _info, _upsampling_policy);
    }

private:
    Size2D              _info;
    InterpolationPolicy _upsampling_policy;
};

/** YOLO Layer */
class YOLOLayer final : public ILayer
{
public:
    /** Construct a YOLO layer.
     *
     * @param[in] act_info    Activation info
     * @param[in] num_classes Number of classes to activate
     */
    YOLOLayer(ActivationLayerInfo act_info, int32_t num_classes)
        : _act_info(act_info), _num_classes(num_classes)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info, _num_classes);
    }

private:
    ActivationLayerInfo _act_info;
    int32_t             _num_classes;
};
} // namespace frontend
} // namespace graph
} // namespace arm_compute
#endif /* __ARM_COMPUTE_GRAPH_LAYERS_H__ */