/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_GRAPH_LAYERS_H__
#define __ARM_COMPUTE_GRAPH_LAYERS_H__

#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/frontend/ILayer.h"
#include "arm_compute/graph/frontend/IStream.h"
#include "arm_compute/graph/frontend/SubStream.h"

#include "arm_compute/core/utils/misc/Utility.h"

#include <memory>
#include <string>

namespace arm_compute
{
namespace graph
{
namespace frontend
{
/** Input Layer */
class InputLayer final : public ILayer
{
public:
    /** Construct an input layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Output Layer */
class OutputLayer final : public ILayer
{
public:
    /** Construct an output layer.
     *
     * @param[in] accessor Accessor to give output tensor data to.
     */
    OutputLayer(ITensorAccessorUPtr accessor)
        : _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
    }

private:
    ITensorAccessorUPtr _accessor;
};

/** Activation Layer */
class ActivationLayer final : public ILayer
{
public:
    /** Construct an activation layer.
     *
     * @param[in] act_info Activation information
     */
    ActivationLayer(ActivationLayerInfo act_info)
        : _act_info(act_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info);
    }

private:
    ActivationLayerInfo _act_info;
};
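
// Illustrative usage sketch (not part of the API): the layer classes in this header are meant to
// be streamed into a concrete IStream implementation. The snippet below assumes the Stream class
// from "arm_compute/graph/frontend/Stream.h" with its operator<<, finalize() and run() members,
// and a hypothetical ITensorAccessor implementation named MyAccessor; substitute whatever
// accessors your application provides.
//
//     #include "arm_compute/graph/frontend/Stream.h"
//
//     using namespace arm_compute::graph::frontend;
//
//     Stream graph(0, "minimal_example");
//     graph << Target::NEON
//           << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), DataType::F32),
//                         arm_compute::support::cpp14::make_unique<MyAccessor>())
//           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
//           << OutputLayer(arm_compute::support::cpp14::make_unique<MyAccessor>());
//     graph.finalize(Target::NEON, GraphConfig());
//     graph.run();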

/** Batch Normalization Layer */
class BatchNormalizationLayer final : public ILayer
{
public:
    /** Construct a batch normalization layer.
     *
     * @param[in] mean    Accessor to get mean tensor data from.
     * @param[in] var     Accessor to get variance tensor data from.
     * @param[in] gamma   (Optional) Accessor to get gamma tensor data from. Default: nullptr.
     * @param[in] beta    (Optional) Accessor to get beta tensor data from. Default: nullptr.
     * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
     */
    BatchNormalizationLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr var,
                            ITensorAccessorUPtr gamma   = nullptr,
                            ITensorAccessorUPtr beta    = nullptr,
                            float               epsilon = 0.001f)
        : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_var == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
                                                          std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _var;
    ITensorAccessorUPtr _gamma;
    ITensorAccessorUPtr _beta;
    float               _epsilon;
};

/** Bounding Box Transform Layer */
class BoundingBoxTransformLayer final : public ILayer
{
public:
    /** Construct a bounding box transform layer.
     *
     * @param[in] sub_stream_input  Graph sub-stream for the input
     * @param[in] sub_stream_deltas Graph sub-stream for the deltas
     * @param[in] info              Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
     */
    BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
        : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
    }

private:
    SubStream                _ss_input;
    SubStream                _ss_deltas;
    BoundingBoxTransformInfo _bbox_info;
};

/** Channel Shuffle Layer */
class ChannelShuffleLayer final : public ILayer
{
public:
    /** Construct a Channel Shuffle layer.
     *
     * @param[in] num_groups Number of groups
     */
    ChannelShuffleLayer(unsigned int num_groups)
        : _num_groups(num_groups)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
    }

private:
    unsigned int _num_groups;
};

/** Concat Layer */
class ConcatLayer final : public ILayer
{
public:
    /** Construct a concatenation layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] axis             Axis over which the concatenation will be performed
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(DataLayoutDimension axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concat layer
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and concatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    DataLayoutDimension                     _axis;
};
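
// Illustrative usage sketch (not part of the API): branching with SubStream and merging the
// branches back with ConcatLayer. It assumes a Stream object named graph that already has layers
// streamed into it, and hypothetical make_weights()/make_bias() accessor factories; only the
// SubStream/ConcatLayer usage itself comes from this header.
//
//     SubStream branch_a(graph);
//     branch_a << ConvolutionLayer(1U, 1U, 64U, make_weights(), make_bias(), PadStrideInfo(1, 1, 0, 0));
//
//     SubStream branch_b(graph);
//     branch_b << ConvolutionLayer(3U, 3U, 64U, make_weights(), make_bias(), PadStrideInfo(1, 1, 1, 1));
//
//     // Concatenate the two branches along the channel dimension (the default axis)
//     graph << ConcatLayer(std::move(branch_a), std::move(branch_b));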

/** Convolution Layer */
class ConvolutionLayer final : public ILayer
{
public:
    /** Construct a convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] ofm                Output feature map.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] num_groups         (Optional) Number of groups. Default: 1.
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    ConvolutionLayer(unsigned int           conv_width,
                     unsigned int           conv_height,
                     unsigned int           ofm,
                     ITensorAccessorUPtr    weights,
                     ITensorAccessorUPtr    bias,
                     PadStrideInfo          conv_info,
                     unsigned int           num_groups         = 1,
                     const QuantizationInfo weights_quant_info = QuantizationInfo(),
                     const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _conv_info(std::move(conv_info)),
          _num_groups(num_groups),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
                                                  Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
                                                  s.hints().convolution_method_hint, s.hints().fast_math_hint,
                                                  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    unsigned int           _ofm;
    const PadStrideInfo    _conv_info;
    unsigned int           _num_groups;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};
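
// Illustrative usage sketch (not part of the API): a 3x3 convolution with 64 output feature maps,
// stride 1 and same padding, streamed after an input layer. make_weights_accessor() is a
// hypothetical helper standing in for whatever ITensorAccessor implementation supplies the trained
// weights and biases in your application (the bias accessor may be nullptr when no bias is used).
//
//     graph << ConvolutionLayer(3U, 3U, 64U,
//                               make_weights_accessor("conv1_w.npy"),   // hypothetical helper
//                               make_weights_accessor("conv1_b.npy"),   // hypothetical helper
//                               PadStrideInfo(1, 1, 1, 1))
//           << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));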

/** Deconvolution Layer */
class DeconvolutionLayer final : public ILayer
{
public:
    /** Construct a deconvolution layer.
     *
     * @param[in] conv_width   Convolution width.
     * @param[in] conv_height  Convolution height.
     * @param[in] ofm          Output feature map.
     * @param[in] weights      Accessor to get kernel weights from.
     * @param[in] bias         Accessor to get kernel bias from.
     * @param[in] deconv_info  Padding and stride information.
     * @param[in] inner_border Inner border padding (right, top)
     */
    DeconvolutionLayer(unsigned int        conv_width,
                       unsigned int        conv_height,
                       unsigned int        ofm,
                       ITensorAccessorUPtr weights,
                       ITensorAccessorUPtr bias,
                       PadStrideInfo       deconv_info,
                       Size2D              inner_border)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _deconv_info(std::move(deconv_info)),
          _inner_border(inner_border),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
                                                    Size2D(_conv_width, _conv_height), _ofm, _deconv_info, _inner_border,
                                                    std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    unsigned int        _ofm;
    const PadStrideInfo _deconv_info;
    Size2D              _inner_border;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};

/** Depthwise Convolution Layer */
class DepthwiseConvolutionLayer final : public ILayer
{
public:
    /** Construct a depthwise convolution layer.
     *
     * @param[in] conv_width       Convolution width.
     * @param[in] conv_height      Convolution height.
     * @param[in] weights          Accessor to get kernel weights from.
     * @param[in] bias             Accessor to get kernel bias from.
     * @param[in] conv_info        Padding and stride information.
     * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
     * @param[in] quant_info       (Optional) Quantization info used for weights
     */
    DepthwiseConvolutionLayer(unsigned int           conv_width,
                              unsigned int           conv_height,
                              ITensorAccessorUPtr    weights,
                              ITensorAccessorUPtr    bias,
                              PadStrideInfo          conv_info,
                              int                    depth_multiplier = 1,
                              const QuantizationInfo quant_info       = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _conv_info(std::move(conv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _depth_multiplier(depth_multiplier),
          _quant_info(std::move(quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
                                                            input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
                                                            s.hints().depthwise_convolution_method_hint,
                                                            std::move(_weights), std::move(_bias), std::move(_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    const PadStrideInfo    _conv_info;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    int                    _depth_multiplier;
    const QuantizationInfo _quant_info;
};
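
// Illustrative usage sketch (not part of the API): a depthwise-separable block in the MobileNet
// style, i.e. a 3x3 depthwise convolution followed by a 1x1 pointwise convolution. The accessor
// factory calls are placeholders for application-provided ITensorAccessor objects.
//
//     graph << DepthwiseConvolutionLayer(3U, 3U, make_weights_accessor("dw_w.npy"), nullptr,
//                                        PadStrideInfo(1, 1, 1, 1))
//           << ConvolutionLayer(1U, 1U, 128U, make_weights_accessor("pw_w.npy"), nullptr,
//                               PadStrideInfo(1, 1, 0, 0));
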
/** DetectionOutput Layer */
class DetectionOutputLayer final : public ILayer
{
public:
    /** Construct a detection output layer.
     *
     * @param[in] sub_stream_conf  Confidence graph sub-stream.
     * @param[in] sub_stream_prior PriorBox graph sub-stream.
     * @param[in] detect_info      DetectionOutput parameters.
     */
    DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, DetectionOutputLayerInfo detect_info)
        : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params  = { name(), s.hints().target_hint };
        NodeIdxPair input_loc      = { s.tail_node(), 0 };
        NodeIdxPair input_conf     = { _ss_conf.tail_node(), 0 };
        NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
        return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
    }

private:
    SubStream                _ss_conf;
    SubStream                _ss_prior;
    DetectionOutputLayerInfo _detect_info;
};
/** Dummy Layer */
class DummyLayer final : public ILayer
{
public:
    /** Construct a dummy layer.
     *
     * @param[in] shape Output shape
     */
    DummyLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Eltwise Layer */
class EltwiseLayer final : public ILayer
{
public:
    /** Construct an element-wise operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     * @param[in] op          Element-wise operation to perform
     */
    EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { _ss0.tail_node(), 0 };
        NodeIdxPair input1        = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
    }

private:
    SubStream        _ss0;
    SubStream        _ss1;
    EltwiseOperation _op;
};
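
// Illustrative usage sketch (not part of the API): a residual-style addition in which one branch
// is the identity and the other applies a convolution, both forked from the same stream. Assumes
// a Stream named graph, a placeholder weights accessor, and an addition member in the
// EltwiseOperation enumeration (spelled EltwiseOperation::Add here; check graph/Types.h).
//
//     SubStream identity(graph);
//     SubStream residual(graph);
//     residual << ConvolutionLayer(3U, 3U, 64U, make_weights_accessor("res_w.npy"), nullptr,
//                                  PadStrideInfo(1, 1, 1, 1));
//     graph << EltwiseLayer(std::move(identity), std::move(residual), EltwiseOperation::Add);
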
/** Flatten Layer */
class FlattenLayer final : public ILayer
{
public:
    /** Construct a flatten layer. */
    FlattenLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
    }
};

/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] weights            Accessor to get weights from.
     * @param[in] bias               Accessor to get bias from.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        ITensorAccessorUPtr           weights,
                        ITensorAccessorUPtr           bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                       std::move(_weights), std::move(_bias), _fc_info,
                                                       std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int                  _num_outputs;
    ITensorAccessorUPtr           _weights;
    ITensorAccessorUPtr           _bias;
    const FullyConnectedLayerInfo _fc_info;
    const QuantizationInfo        _weights_quant_info;
    const QuantizationInfo        _out_quant_info;
};
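
// Illustrative usage sketch (not part of the API): a typical classifier tail that applies a fully
// connected layer over the incoming feature maps and finishes with a softmax and an output node.
// The make_weights_accessor()/make_output_accessor() calls are placeholders for
// application-provided ITensorAccessor objects.
//
//     graph << FullyConnectedLayer(1000U, make_weights_accessor("fc_w.npy"),
//                                  make_weights_accessor("fc_b.npy"))
//           << SoftmaxLayer()
//           << OutputLayer(make_output_accessor());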

/** Normalization Layer */
class NormalizationLayer final : public ILayer
{
public:
    /** Construct a normalization layer.
     *
     * @param[in] norm_info Normalization information.
     */
    NormalizationLayer(NormalizationLayerInfo norm_info)
        : _norm_info(norm_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
    }

private:
    NormalizationLayerInfo _norm_info;
};

/** Normalize planar YUV Layer */
class NormalizePlanarYUVLayer final : public ILayer
{
public:
    /** Construct a normalize planar YUV layer.
     *
     * @param[in] mean Accessor to get mean tensor data from.
     * @param[in] std  Accessor to get std tensor data from.
     */
    NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr std)
        : _mean(std::move(mean)), _std(std::move(std))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_std == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
                                                           std::move(_mean), std::move(_std));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _std;
};

/** Pad Layer */
class PadLayer final : public ILayer
{
public:
    /** Construct a pad layer.
     *
     * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
     *                    specifies the front and the end padding in the i-th dimension.
     */
    PadLayer(PaddingList padding)
        : _padding(padding)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding);
    }

private:
    PaddingList _padding;
};

/** Permute Layer */
class PermuteLayer final : public ILayer
{
public:
    /** Construct a permute layer.
     *
     * @param[in] perm   Permutation vector.
     * @param[in] layout (Optional) Data layout to assign to permuted tensor.
     *                   If UNKNOWN then the input's layout will be used.
     */
    PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
        : _perm(perm), _layout(layout)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
    }

private:
    PermutationVector _perm;
    DataLayout        _layout;
};

/** Pooling Layer */
class PoolingLayer final : public ILayer
{
public:
    /** Construct a pooling layer.
     *
     * @param[in] pool_info Pooling information.
     */
    PoolingLayer(PoolingLayerInfo pool_info)
        : _pool_info(pool_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
    }

private:
    PoolingLayerInfo _pool_info;
};
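
// Illustrative usage sketch (not part of the API): a 3x3 max-pooling step with stride 2 appended
// to the stream. The PoolingLayerInfo arguments shown (pooling type, pool size, pad/stride) follow
// the arm_compute core API as the author understands it; check arm_compute/core/Types.h for the
// exact overloads available in your version.
//
//     graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)));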

/** PriorBox Layer */
class PriorBoxLayer final : public ILayer
{
public:
    /** Construct a priorbox layer.
     *
     * @param[in] sub_stream First graph sub-stream
     * @param[in] prior_info PriorBox parameters.
     */
    PriorBoxLayer(SubStream &&sub_stream, PriorBoxLayerInfo prior_info)
        : _ss(std::move(sub_stream)), _prior_info(prior_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { s.tail_node(), 0 };
        NodeIdxPair input1        = { _ss.tail_node(), 0 };
        return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
    }

private:
    SubStream         _ss;
    PriorBoxLayerInfo _prior_info;
};

/** Reorg Layer */
class ReorgLayer final : public ILayer
{
public:
    /** Construct a reorg layer.
     *
     * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
     *                   It defines the spatial distance between 2 consecutive pixels in the x and y direction
     */
    ReorgLayer(int stride)
        : _stride(stride)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
    }

private:
    int _stride;
};

/** Reshape Layer */
class ReshapeLayer final : public ILayer
{
public:
    /** Construct a reshape layer.
     *
     * @param[in] shape Target shape.
     */
    ReshapeLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Resize Layer */
class ResizeLayer final : public ILayer
{
public:
    /** Construct a resize layer.
     *
     * @param[in] policy       Interpolation policy.
     * @param[in] width_scale  Width scaling factor.
     * @param[in] height_scale Height scaling factor.
     */
    ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
        : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
    }

private:
    InterpolationPolicy _policy;
    float               _width_scale;
    float               _height_scale;
};

/** ROIAlign Layer */
class ROIAlignLayer final : public ILayer
{
public:
    /** Construct a ROIAlign layer.
     *
     * @param[in] sub_stream_input Graph sub-stream for the input
     * @param[in] sub_stream_rois  Graph sub-stream for the rois
     * @param[in] pool_info        Pooling information.
     */
    ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
        : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
    {
    }

    /** Prevent instances of this class from being copy constructed */
    ROIAlignLayer(const ROIAlignLayer &) = delete;
    /** Prevent instances of this class from being copied */
    ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair rois          = { _ss_rois.tail_node(), 0 };
        return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
    }

private:
    SubStream           _ss_input;
    SubStream           _ss_rois;
    ROIPoolingLayerInfo _pool_info;
};

/** Scale Layer */
class ScaleLayer final : public ILayer
{
public:
    /** Construct a scale layer.
     *
     * @param[in] mul_w Accessor to get mul weight from.
     * @param[in] add_w Accessor to get add weight from.
     */
    ScaleLayer(ITensorAccessorUPtr mul_w,
               ITensorAccessorUPtr add_w)
        : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
    }

private:
    ITensorAccessorUPtr _mul_w;
    ITensorAccessorUPtr _add_w;
};

/** Slice Layer */
class SliceLayer final : public ILayer
{
public:
    /** Construct a slice layer.
     *
     * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be equal to rank(input).
     * @param[in] ends   The ends of the dimensions of the input tensor to be sliced. The length must be equal to rank(input).
     */
    SliceLayer(Coordinates &starts, Coordinates &ends)
        : _starts(starts), _ends(ends)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
    }

private:
    Coordinates _starts;
    Coordinates _ends;
};

/** Softmax Layer */
class SoftmaxLayer final : public ILayer
{
public:
    /** Construct a softmax layer.
     *
     * @param[in] beta (Optional) Beta value. Default 1.0.
     */
    SoftmaxLayer(float beta = 1.0f)
        : _beta(beta)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
    }

private:
    float _beta;
};

/** Upsample Layer */
class UpsampleLayer final : public ILayer
{
public:
    /** Construct an Upsample layer.
     *
     * @param[in] info              Stride info
     * @param[in] upsampling_policy Upsampling policy
     */
    UpsampleLayer(Size2D info, InterpolationPolicy upsampling_policy)
        : _info(info), _upsampling_policy(upsampling_policy)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_upsample_node(s.graph(), common_params, input, _info, _upsampling_policy);
    }

private:
    Size2D              _info;
    InterpolationPolicy _upsampling_policy;
};

/** YOLO Layer */
class YOLOLayer final : public ILayer
{
public:
    /** Construct a YOLO layer.
     *
     * @param[in] act_info    Activation info
     * @param[in] num_classes Number of classes to activate
     */
    YOLOLayer(ActivationLayerInfo act_info, int32_t num_classes)
        : _act_info(act_info), _num_classes(num_classes)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info, _num_classes);
    }

private:
    ActivationLayerInfo _act_info;
    int32_t             _num_classes;
};
} // namespace frontend
} // namespace graph
} // namespace arm_compute
#endif /* __ARM_COMPUTE_GRAPH_LAYERS_H__ */