blob: 4e6f0eee2d673ca8e747e5f461446886829d3584 [file] [log] [blame]
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001/*
Giuseppe Rossinibb365de2019-02-15 10:24:47 +00002 * Copyright (c) 2018-2019 ARM Limited.
Georgios Pinitasd8734b52017-12-22 15:27:52 +00003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010024#ifndef __ARM_COMPUTE_GRAPH_LAYERS_H__
25#define __ARM_COMPUTE_GRAPH_LAYERS_H__
Georgios Pinitasd8734b52017-12-22 15:27:52 +000026
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010027#include "arm_compute/graph/GraphBuilder.h"
28#include "arm_compute/graph/Types.h"
29#include "arm_compute/graph/frontend/ILayer.h"
30#include "arm_compute/graph/frontend/IStream.h"
31#include "arm_compute/graph/frontend/SubStream.h"
Georgios Pinitasd8734b52017-12-22 15:27:52 +000032
33#include "arm_compute/core/utils/misc/Utility.h"
34
35#include <memory>
36#include <string>
37
38namespace arm_compute
39{
Georgios Pinitasd9eb2752018-04-03 13:44:29 +010040namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +000041{
42namespace frontend
43{
44/** Input Layer */
45class InputLayer final : public ILayer
46{
47public:
Alex Gildayc357c472018-03-21 13:54:09 +000048 /** Construct an input layer.
49 *
50 * @param[in] desc Description of input tensor.
51 * @param[in] accessor Accessor to get input tensor data from.
52 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +000053 InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
54 : _desc(desc), _accessor(std::move(accessor))
55 {
56 }
57
58 NodeID create_layer(IStream &s) override
59 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +010060 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +000061 return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
62 }
63
64private:
65 TensorDescriptor _desc;
66 ITensorAccessorUPtr _accessor;
67};
68
69/** Output Layer */
70class OutputLayer final : public ILayer
71{
72public:
Alex Gildayc357c472018-03-21 13:54:09 +000073 /** Construct an output layer.
74 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +000075 * @param[in] accessor Accessor to give output tensor data to.
76 * @param[in] connection_idx (Optional) Input connection index
Alex Gildayc357c472018-03-21 13:54:09 +000077 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +000078 OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
79 : _accessor(std::move(accessor)), _connection_idx(connection_idx)
Georgios Pinitasd8734b52017-12-22 15:27:52 +000080 {
81 }
82
83 NodeID create_layer(IStream &s) override
84 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +010085 NodeParams common_params = { name(), s.hints().target_hint };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +000086 NodeIdxPair input = { s.tail_node(), _connection_idx };
Georgios Pinitasd8734b52017-12-22 15:27:52 +000087 return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
88 }
89
90private:
91 ITensorAccessorUPtr _accessor;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +000092 unsigned int _connection_idx;
Georgios Pinitasd8734b52017-12-22 15:27:52 +000093};
94
95/** Activation Layer */
96class ActivationLayer final : public ILayer
97{
98public:
Alex Gildayc357c472018-03-21 13:54:09 +000099 /** Construct an activation layer.
100 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000101 * @param[in] act_info Activation information
102 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000103 */
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000104 ActivationLayer(ActivationLayerInfo act_info,
105 const QuantizationInfo out_quant_info = QuantizationInfo())
106 : _act_info(act_info),
107 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000108 {
109 }
110
111 NodeID create_layer(IStream &s) override
112 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100113 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000114 NodeIdxPair input = { s.tail_node(), 0 };
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000115 return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000116 }
117
118private:
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000119 ActivationLayerInfo _act_info;
120 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000121};
122
123/** Batchnormalization Layer */
124class BatchNormalizationLayer final : public ILayer
125{
126public:
Alex Gildayc357c472018-03-21 13:54:09 +0000127 /** Construct a batch normalization layer.
128 *
129 * @param[in] mean Accessor to get mean tensor data from.
130 * @param[in] var Accessor to get var tensor data from.
131 * @param[in] gamma (Optional) Accessor to get gamma tensor data from. Default: nullptr.
132 * @param[in] beta (Optional) Accessor to get beta tensor data from. Default: nullptr.
133 * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
134 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000135 BatchNormalizationLayer(ITensorAccessorUPtr mean,
136 ITensorAccessorUPtr var,
137 ITensorAccessorUPtr gamma = nullptr,
138 ITensorAccessorUPtr beta = nullptr,
139 float epsilon = 0.001f)
140 : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
141 {
142 }
143
144 NodeID create_layer(IStream &s) override
145 {
146 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
147 ARM_COMPUTE_ERROR_ON(_var == nullptr);
148
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100149 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000150 NodeIdxPair input = { s.tail_node(), 0 };
151 return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
152 std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
153 }
154
155private:
156 ITensorAccessorUPtr _mean;
157 ITensorAccessorUPtr _var;
158 ITensorAccessorUPtr _gamma;
159 ITensorAccessorUPtr _beta;
160 float _epsilon;
161};
162
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100163/** Bounding Box Transform Layer */
164class BoundingBoxTransformLayer final : public ILayer
165{
166public:
167 /** Construct a bounding box transform layer.
168 *
169 * @param[in] sub_stream_input Graph sub-stream for the input
170 * @param[in] sub_stream_deltas Graph sub-stream for the deltas
171 * @param[in] info Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
172 */
173 BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
174 : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
175 {
176 }
177
178 /** Create layer and add to the given stream.
179 *
180 * @param[in] s Stream to add layer to.
181 *
182 * @return ID of the created node.
183 */
184 NodeID create_layer(IStream &s) override
185 {
186 NodeParams common_params = { name(), s.hints().target_hint };
187 NodeIdxPair input = { _ss_input.tail_node(), 0 };
188 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
189 return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
190 }
191
192private:
193 SubStream _ss_input;
194 SubStream _ss_deltas;
195 BoundingBoxTransformInfo _bbox_info;
196};
197
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100198/** Channel Shuffle Layer */
199class ChannelShuffleLayer final : public ILayer
200{
201public:
202 /** Construct a Channel Shuffle layer.
203 *
204 * @param[in] num_groups Number of groups
205 */
206 ChannelShuffleLayer(unsigned int num_groups)
207 : _num_groups(num_groups)
208 {
209 }
210
211 NodeID create_layer(IStream &s) override
212 {
213 NodeParams common_params = { name(), s.hints().target_hint };
214 NodeIdxPair input = { s.tail_node(), 0 };
215 return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
216 }
217
218private:
219 unsigned int _num_groups;
220};
221
/** Concat Layer */
class ConcatLayer final : public ILayer
{
public:
    /** Construct a concatenation layer
     *
     * Concatenates two or more sub-streams along the channel dimension.
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Move each remaining variadic sub-stream into the owned list, in order
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] concat_descriptor Concat layer descriptor
     * @param[in] sub_stream1       First graph branch
     * @param[in] sub_stream2       Second graph branch
     * @param[in] rest_sub_streams  Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(concat_descriptor)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Move each remaining variadic sub-stream into the owned list, in order
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concat layer
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        // Degenerate case: a single branch needs no concatenation node at all
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and concatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    // Skip branches that end in an Output node; they cannot feed a concat
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;       // Owned input branches
    descriptors::ConcatLayerDescriptor      _concat_descriptor; // Axis (and other) concat options
};
307
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000308/** Convolution Layer */
309class ConvolutionLayer final : public ILayer
310{
311public:
Alex Gildayc357c472018-03-21 13:54:09 +0000312 /** Construct a convolution layer.
313 *
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100314 * @param[in] conv_width Convolution width.
315 * @param[in] conv_height Convolution height.
316 * @param[in] ofm Output feature map.
317 * @param[in] weights Accessor to get kernel weights from.
318 * @param[in] bias Accessor to get kernel bias from.
319 * @param[in] conv_info Padding and stride information.
320 * @param[in] num_groups (Optional) Number of groups. Default: 1.
321 * @param[in] weights_quant_info (Optional) Weights quantization information
322 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000323 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100324 ConvolutionLayer(unsigned int conv_width,
325 unsigned int conv_height,
326 unsigned int ofm,
327 ITensorAccessorUPtr weights,
328 ITensorAccessorUPtr bias,
329 PadStrideInfo conv_info,
330 unsigned int num_groups = 1,
331 const QuantizationInfo weights_quant_info = QuantizationInfo(),
332 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000333 : _conv_width(conv_width),
334 _conv_height(conv_height),
335 _ofm(ofm),
336 _conv_info(std::move(conv_info)),
337 _num_groups(num_groups),
338 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100339 _bias(std::move(bias)),
340 _weights_quant_info(std::move(weights_quant_info)),
341 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000342 {
343 }
344
345 NodeID create_layer(IStream &s) override
346 {
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000347 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100348 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000349 return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
Georgios Pinitasee33ea52018-03-08 16:01:29 +0000350 Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
Giorgio Arena59631a12018-05-02 13:59:04 +0100351 s.hints().convolution_method_hint, s.hints().fast_math_hint,
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100352 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000353 }
354
355private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100356 unsigned int _conv_width;
357 unsigned int _conv_height;
358 unsigned int _ofm;
359 const PadStrideInfo _conv_info;
360 unsigned int _num_groups;
361 ITensorAccessorUPtr _weights;
362 ITensorAccessorUPtr _bias;
363 const QuantizationInfo _weights_quant_info;
364 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000365};
366
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100367/** Deconvolution Layer */
368class DeconvolutionLayer final : public ILayer
369{
370public:
371 /** Construct a convolution layer.
372 *
373 * @param[in] conv_width Convolution width.
374 * @param[in] conv_height Convolution height.
375 * @param[in] ofm Output feature map.
376 * @param[in] weights Accessor to get kernel weights from.
377 * @param[in] bias Accessor to get kernel bias from.
378 * @param[in] deconv_info Padding and stride information.
379 * @param[in] inner_border Inner border padding (right, top)
380 */
381 DeconvolutionLayer(unsigned int conv_width,
382 unsigned int conv_height,
383 unsigned int ofm,
384 ITensorAccessorUPtr weights,
385 ITensorAccessorUPtr bias,
386 PadStrideInfo deconv_info,
387 Size2D inner_border)
388 : _conv_width(conv_width),
389 _conv_height(conv_height),
390 _ofm(ofm),
391 _deconv_info(std::move(deconv_info)),
392 _inner_border(inner_border),
393 _weights(std::move(weights)),
394 _bias(std::move(bias))
395 {
396 }
397
398 NodeID create_layer(IStream &s) override
399 {
400 NodeIdxPair input = { s.tail_node(), 0 };
401 NodeParams common_params = { name(), s.hints().target_hint };
402 return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
403 Size2D(_conv_width, _conv_height), _ofm, _deconv_info, _inner_border,
404 std::move(_weights), std::move(_bias));
405 }
406
407private:
408 unsigned int _conv_width;
409 unsigned int _conv_height;
410 unsigned int _ofm;
411 const PadStrideInfo _deconv_info;
412 Size2D _inner_border;
413 ITensorAccessorUPtr _weights;
414 ITensorAccessorUPtr _bias;
415};
416
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000417/** Depthwise Convolution Layer */
418class DepthwiseConvolutionLayer final : public ILayer
419{
420public:
Alex Gildayc357c472018-03-21 13:54:09 +0000421 /** Construct a depthwise convolution layer.
422 *
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000423 * @param[in] conv_width Convolution width.
424 * @param[in] conv_height Convolution height.
425 * @param[in] weights Accessor to get kernel weights from.
426 * @param[in] bias Accessor to get kernel bias from.
427 * @param[in] conv_info Padding and stride information.
428 * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
429 * @param[in] weights_quant_info (Optional) Quantization info used for weights
430 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000431 */
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100432 DepthwiseConvolutionLayer(unsigned int conv_width,
433 unsigned int conv_height,
434 ITensorAccessorUPtr weights,
435 ITensorAccessorUPtr bias,
436 PadStrideInfo conv_info,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000437 int depth_multiplier = 1,
438 const QuantizationInfo weights_quant_info = QuantizationInfo(),
439 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000440 : _conv_width(conv_width),
441 _conv_height(conv_height),
442 _conv_info(std::move(conv_info)),
443 _weights(std::move(weights)),
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100444 _bias(std::move(bias)),
Georgios Pinitas05045c12018-12-07 18:31:47 +0000445 _depth_multiplier(depth_multiplier),
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000446 _weights_quant_info(std::move(weights_quant_info)),
447 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000448 {
449 }
450
451 NodeID create_layer(IStream &s) override
452 {
453 NodeIdxPair input = { s.tail_node(), 0 };
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100454 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000455 return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
Georgios Pinitas05045c12018-12-07 18:31:47 +0000456 input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000457 s.hints().depthwise_convolution_method_hint,
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000458 std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000459 }
460
461private:
Giorgio Arenabb54e4e2018-04-05 17:20:34 +0100462 unsigned int _conv_width;
463 unsigned int _conv_height;
464 const PadStrideInfo _conv_info;
465 ITensorAccessorUPtr _weights;
466 ITensorAccessorUPtr _bias;
Georgios Pinitas05045c12018-12-07 18:31:47 +0000467 int _depth_multiplier;
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000468 const QuantizationInfo _weights_quant_info;
469 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000470};
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000471/** DetectionOutput Layer */
472class DetectionOutputLayer final : public ILayer
473{
474public:
475 /** Construct a detection output layer.
476 *
477 * @param[in] sub_stream_conf Confidence graph sub-stream.
478 * @param[in] sub_stream_prior PriorBox graph sub-stream.
479 * @param[in] detect_info DetectionOutput parameters.
480 */
481 DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, DetectionOutputLayerInfo detect_info)
482 : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
483 {
484 }
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000485
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000486 NodeID create_layer(IStream &s) override
487 {
488 NodeParams common_params = { name(), s.hints().target_hint };
489 NodeIdxPair input_loc = { s.tail_node(), 0 };
490 NodeIdxPair input_conf = { _ss_conf.tail_node(), 0 };
491 NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
492 return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
493 }
494
495private:
496 SubStream _ss_conf;
497 SubStream _ss_prior;
498 DetectionOutputLayerInfo _detect_info;
499};
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100500/** Dummy Layer */
501class DummyLayer final : public ILayer
502{
503public:
504 /** Construct an input layer.
505 *
506 * @param[in] shape Output shape
507 */
508 DummyLayer(TensorShape shape)
509 : _shape(shape)
510 {
511 }
512
513 NodeID create_layer(IStream &s) override
514 {
515 NodeParams common_params = { name(), s.hints().target_hint };
516 NodeIdxPair input = { s.tail_node(), 0 };
517 return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
518 }
519
520private:
521 TensorShape _shape;
522};
523
Georgios Pinitas427bbbf2018-08-28 13:32:02 +0100524class EltwiseLayer final : public ILayer
525{
526public:
527 /** Construct an element-wise operation layer
528 *
529 * @param[in] sub_stream0 First graph sub-stream
530 * @param[in] sub_stream1 First graph sub-stream
531 * @param[in] op Element-wise operation to perform
532 */
533 EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
534 : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
535 {
536 }
537
538 NodeID create_layer(IStream &s) override
539 {
540 NodeParams common_params = { name(), s.hints().target_hint };
541 NodeIdxPair input0 = { _ss0.tail_node(), 0 };
542 NodeIdxPair input1 = { _ss1.tail_node(), 0 };
543
544 return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
545 }
546
547private:
548 SubStream _ss0;
549 SubStream _ss1;
550 EltwiseOperation _op;
551};
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000552/** Flatten Layer */
553class FlattenLayer final : public ILayer
554{
555public:
Alex Gildayc357c472018-03-21 13:54:09 +0000556 /** Construct a flatten layer. */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000557 FlattenLayer()
558 {
559 }
560
561 NodeID create_layer(IStream &s) override
562 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100563 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000564 NodeIdxPair input = { s.tail_node(), 0 };
565 return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
566 }
567};
568
569/** Fully Connected Layer */
570class FullyConnectedLayer final : public ILayer
571{
572public:
Alex Gildayc357c472018-03-21 13:54:09 +0000573 /** Construct a fully connected layer.
574 *
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100575 * @param[in] num_outputs Number of outputs.
576 * @param[in] weights Accessor to get weights from.
577 * @param[in] bias Accessor to get bias from.
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100578 * @param[in] fc_info (Optional) Fully connected layer metadata
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100579 * @param[in] weights_quant_info (Optional) Weights quantization information
580 * @param[in] out_quant_info (Optional) Output quantization info
Alex Gildayc357c472018-03-21 13:54:09 +0000581 */
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100582 FullyConnectedLayer(unsigned int num_outputs,
583 ITensorAccessorUPtr weights,
584 ITensorAccessorUPtr bias,
585 const FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo(),
586 const QuantizationInfo weights_quant_info = QuantizationInfo(),
587 const QuantizationInfo out_quant_info = QuantizationInfo())
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100588 : _num_outputs(num_outputs),
589 _weights(std::move(weights)),
590 _bias(std::move(bias)),
Michele Di Giorgioa42f55f2019-03-08 14:52:17 +0000591 _weights_ss(nullptr),
592 _bias_ss(nullptr),
593 _fc_info(fc_info),
594 _weights_quant_info(std::move(weights_quant_info)),
595 _out_quant_info(std::move(out_quant_info))
596 {
597 }
598
599 /** Construct a fully connected layer.
600 *
601 * @param[in] num_outputs Number of outputs.
602 * @param[in] sub_stream_weights Graph sub-stream for the weights.
603 * @param[in] sub_stream_bias Graph sub-stream for the bias.
604 * @param[in] fc_info (Optional) Fully connected layer metadata
605 * @param[in] weights_quant_info (Optional) Weights quantization information
606 * @param[in] out_quant_info (Optional) Output quantization info
607 */
608 FullyConnectedLayer(unsigned int num_outputs,
609 SubStream &&sub_stream_weights,
610 SubStream &&sub_stream_bias,
611 const FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo(),
612 const QuantizationInfo weights_quant_info = QuantizationInfo(),
613 const QuantizationInfo out_quant_info = QuantizationInfo())
614 : _num_outputs(num_outputs),
615 _weights(nullptr),
616 _bias(nullptr),
617 _weights_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_weights))),
618 _bias_ss(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream_bias))),
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100619 _fc_info(fc_info),
Georgios Pinitas2f1366a2018-07-31 16:33:06 +0100620 _weights_quant_info(std::move(weights_quant_info)),
621 _out_quant_info(std::move(out_quant_info))
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000622 {
623 }
624
Michele Di Giorgio47e6fed2018-11-13 12:04:25 +0000625 /** Create layer and add to the given stream.
626 *
627 * @param[in] s Stream to add layer to.
628 *
629 * @return ID of the created node.
630 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000631 NodeID create_layer(IStream &s) override
632 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100633 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000634 NodeIdxPair input = { s.tail_node(), 0 };
Michele Di Giorgioa42f55f2019-03-08 14:52:17 +0000635 if(_weights != nullptr)
636 {
637 return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
638 std::move(_weights), std::move(_bias), _fc_info,
639 std::move(_weights_quant_info), std::move(_out_quant_info));
640 }
641 else
642 {
643 ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);
644
645 NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
646 return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
647 _weights_ss->tail_node(), bias_nid, _fc_info,
648 std::move(_out_quant_info));
649 }
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000650 }
651
652private:
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100653 unsigned int _num_outputs;
654 ITensorAccessorUPtr _weights;
655 ITensorAccessorUPtr _bias;
Michele Di Giorgioa42f55f2019-03-08 14:52:17 +0000656 std::unique_ptr<SubStream> _weights_ss;
657 std::unique_ptr<SubStream> _bias_ss;
Georgios Pinitasc55cef12018-08-01 15:24:18 +0100658 const FullyConnectedLayerInfo _fc_info;
659 const QuantizationInfo _weights_quant_info;
660 const QuantizationInfo _out_quant_info;
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000661};
662
Manuel Bottini5209be52019-02-13 16:34:56 +0000663/** Generate Proposals Layer */
664class GenerateProposalsLayer final : public ILayer
665{
666public:
667 /** Construct a generate proposals layer.
668 *
669 * @param[in] ss_scores Graph sub-stream for the scores.
670 * @param[in] ss_deltas Graph sub-stream for the deltas.
671 * @param[in] ss_anchors Graph sub-stream for the anchors.
672 * @param[in] info Generate Proposals operation information.
673 */
674 GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
675 : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
676 {
677 }
678
679 /** Create layer and add to the given stream.
680 *
681 * @param[in] s Stream to add layer to.
682 *
683 * @return ID of the created node.
684 */
685 NodeID create_layer(IStream &s) override
686 {
687 NodeParams common_params = { name(), s.hints().target_hint };
688 NodeIdxPair scores = { _ss_scores.tail_node(), 0 };
689 NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
690 NodeIdxPair anchors = { _ss_anchors.tail_node(), 0 };
691 return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
692 }
693
694private:
695 SubStream _ss_scores;
696 SubStream _ss_deltas;
697 SubStream _ss_anchors;
698 GenerateProposalsInfo _info;
699};
700
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000701/** Normalization Layer */
702class NormalizationLayer final : public ILayer
703{
704public:
Alex Gildayc357c472018-03-21 13:54:09 +0000705 /** Construct a normalization layer.
706 *
707 * @param[in] norm_info Normalization information.
708 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000709 NormalizationLayer(NormalizationLayerInfo norm_info)
710 : _norm_info(norm_info)
711 {
712 }
713
714 NodeID create_layer(IStream &s) override
715 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100716 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000717 NodeIdxPair input = { s.tail_node(), 0 };
718 return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
719 }
720
721private:
722 NormalizationLayerInfo _norm_info;
723};
724
Michele Di Giorgio555d1102018-09-12 13:51:59 +0100725/** Normalize planar YUV Layer */
726class NormalizePlanarYUVLayer final : public ILayer
727{
728public:
729 /** Construct a normalize planar YUV layer.
730 *
731 * @param[in] mean Accessor to get mean tensor data from.
732 * @param[in] std Accessor to get std tensor data from.
733 */
734 NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
735 ITensorAccessorUPtr std)
736 : _mean(std::move(mean)), _std(std::move(std))
737 {
738 }
739
740 NodeID create_layer(IStream &s) override
741 {
742 ARM_COMPUTE_ERROR_ON(_mean == nullptr);
743 ARM_COMPUTE_ERROR_ON(_std == nullptr);
744
745 NodeParams common_params = { name(), s.hints().target_hint };
746 NodeIdxPair input = { s.tail_node(), 0 };
747 return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
748 std::move(_mean), std::move(_std));
749 }
750
751private:
752 ITensorAccessorUPtr _mean;
753 ITensorAccessorUPtr _std;
754};
755
Michele Di Giorgio4bb17332018-09-26 13:56:51 +0100756/** Pad Layer */
757class PadLayer final : public ILayer
758{
759public:
760 /** Construct a pad layer.
761 *
762 * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
763 * specifies the front and the end padding in the i-th dimension.
764 */
765 PadLayer(PaddingList padding)
766 : _padding(padding)
767 {
768 }
769
770 NodeID create_layer(IStream &s) override
771 {
772 NodeParams common_params = { name(), s.hints().target_hint };
773 NodeIdxPair input = { s.tail_node(), 0 };
774 return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding);
775 }
776
777private:
778 PaddingList _padding;
779};
780
Georgios Pinitas57c48242018-08-02 13:41:49 +0100781/** Permute Layer */
782class PermuteLayer final : public ILayer
783{
784public:
785 /** Construct a permute layer.
786 *
787 * @param[in] perm Permutation vector.
788 * @param[in] layout (Optional) Data layout to assign to permuted tensor.
789 * If UNKNOWN then the input's layout will be used.
790 */
791 PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
792 : _perm(perm), _layout(layout)
793 {
794 }
795
796 NodeID create_layer(IStream &s) override
797 {
798 NodeParams common_params = { name(), s.hints().target_hint };
799 NodeIdxPair input = { s.tail_node(), 0 };
800 return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
801 }
802
803private:
804 PermutationVector _perm;
805 DataLayout _layout;
806};
807
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000808/** Pooling Layer */
809class PoolingLayer final : public ILayer
810{
811public:
Alex Gildayc357c472018-03-21 13:54:09 +0000812 /** Construct a pooling layer.
813 *
814 * @param[in] pool_info Pooling information.
815 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000816 PoolingLayer(PoolingLayerInfo pool_info)
817 : _pool_info(pool_info)
818 {
819 }
820
821 NodeID create_layer(IStream &s) override
822 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100823 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000824 NodeIdxPair input = { s.tail_node(), 0 };
825 return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
826 }
827
828private:
829 PoolingLayerInfo _pool_info;
830};
831
Pablo Tello32521432018-11-15 14:43:10 +0000832/** PriorBox Layer */
833class PriorBoxLayer final : public ILayer
834{
835public:
836 /** Construct a priorbox layer.
837 *
838 * @param[in] sub_stream First graph sub-stream
839 * @param[in] prior_info PriorBox parameters.
840 */
841 PriorBoxLayer(SubStream &&sub_stream, PriorBoxLayerInfo prior_info)
842 : _ss(std::move(sub_stream)), _prior_info(prior_info)
843 {
844 }
845
846 NodeID create_layer(IStream &s) override
847 {
848 NodeParams common_params = { name(), s.hints().target_hint };
849 NodeIdxPair input0 = { s.tail_node(), 0 };
850 NodeIdxPair input1 = { _ss.tail_node(), 0 };
851 return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
852 }
853
854private:
855 SubStream _ss;
856 PriorBoxLayerInfo _prior_info;
857};
858
Gian Marco Iodice23e24792018-09-07 15:32:14 +0100859/** Reorg Layer */
860class ReorgLayer final : public ILayer
861{
862public:
863 /** Construct a reorg layer.
864 *
865 * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
866 * It defines the spatial distance between 2 consecutive pixels in the x and y direction
867 */
868 ReorgLayer(int stride)
869 : _stride(stride)
870 {
871 }
872
873 NodeID create_layer(IStream &s) override
874 {
875 NodeParams common_params = { name(), s.hints().target_hint };
876 NodeIdxPair input = { s.tail_node(), 0 };
877 return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
878 }
879
880private:
881 int _stride;
882};
883
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000884/** Reshape Layer */
885class ReshapeLayer final : public ILayer
886{
887public:
Alex Gildayc357c472018-03-21 13:54:09 +0000888 /** Construct a reshape layer.
889 *
890 * @param[in] shape Target shape.
891 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000892 ReshapeLayer(TensorShape shape)
893 : _shape(shape)
894 {
895 }
896
897 NodeID create_layer(IStream &s) override
898 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +0100899 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +0000900 NodeIdxPair input = { s.tail_node(), 0 };
901 return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
902 }
903
904private:
905 TensorShape _shape;
906};
907
Georgios Pinitas087eaf62018-05-16 15:52:35 +0100908/** Resize Layer */
909class ResizeLayer final : public ILayer
910{
911public:
912 ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
913 : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
914 {
915 }
916
917 NodeID create_layer(IStream &s) override
918 {
919 NodeParams common_params = { name(), s.hints().target_hint };
920 NodeIdxPair input = { s.tail_node(), 0 };
921 return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
922 }
923
924private:
925 InterpolationPolicy _policy;
926 float _width_scale;
927 float _height_scale;
928};
929
Manuel Bottini3f9d4d72018-10-19 14:04:42 +0100930/** ROIAlign Layer */
931class ROIAlignLayer final : public ILayer
932{
933public:
934 /** Construct a RoiAlign layer.
935 *
936 * @param[in] sub_stream_input Graph sub-stream for the input
937 * @param[in] sub_stream_rois Graph sub-stream for the rois
938 * @param[in] pool_info Pooling information.
939 */
940 ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
941 : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
942 {
943 }
944
945 /** Prevent instances of this class from being copy constructed */
946 ROIAlignLayer(const ROIAlignLayer &) = delete;
947 /** Prevent instances of this class from being copied */
948 ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;
949
950 NodeID create_layer(IStream &s) override
951 {
952 NodeParams common_params = { name(), s.hints().target_hint };
953 NodeIdxPair input = { _ss_input.tail_node(), 0 };
954 NodeIdxPair rois = { _ss_rois.tail_node(), 0 };
955 return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
956 }
957
958private:
959 SubStream _ss_input;
960 SubStream _ss_rois;
961 ROIPoolingLayerInfo _pool_info;
962};
963
Isabella Gottardi88d5b222018-04-06 12:24:55 +0100964/** Scale Layer */
965class ScaleLayer final : public ILayer
966{
967public:
968 /** Construct a scale layer.
969 *
970 * @param[in] mul_w Accessor to get mul weight from.
971 * @param[in] add_w Accessor to get add weight from.
972 */
973 ScaleLayer(ITensorAccessorUPtr mul_w,
974 ITensorAccessorUPtr add_w)
975 : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
976 {
977 }
978
979 NodeID create_layer(IStream &s) override
980 {
981 NodeParams common_params = { name(), s.hints().target_hint };
982 NodeIdxPair input = { s.tail_node(), 0 };
983 return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
984 }
985
986private:
987 ITensorAccessorUPtr _mul_w;
988 ITensorAccessorUPtr _add_w;
989};
990
Michele Di Giorgioc30b6682018-09-12 17:44:08 +0100991/** Slice Layer */
992class SliceLayer final : public ILayer
993{
994public:
995 /** Construct a slice layer.
996 *
997 * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
998 * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
999 */
1000 SliceLayer(Coordinates &starts, Coordinates &ends)
1001 : _starts(starts), _ends(ends)
1002 {
1003 }
1004
1005 NodeID create_layer(IStream &s) override
1006 {
1007 NodeParams common_params = { name(), s.hints().target_hint };
1008 NodeIdxPair input = { s.tail_node(), 0 };
1009 return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
1010 }
1011
1012private:
1013 Coordinates _starts;
1014 Coordinates _ends;
1015};
1016
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001017/** Softmax Layer */
1018class SoftmaxLayer final : public ILayer
1019{
1020public:
Alex Gildayc357c472018-03-21 13:54:09 +00001021 /** Construct a softmax layer.
1022 *
1023 * @param[in] beta (Optional) Beta value. Default 1.0.
1024 */
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001025 SoftmaxLayer(float beta = 1.0f)
1026 : _beta(beta)
1027 {
1028 }
1029
1030 NodeID create_layer(IStream &s) override
1031 {
Georgios Pinitas5c2fb3f2018-05-01 15:26:20 +01001032 NodeParams common_params = { name(), s.hints().target_hint };
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001033 NodeIdxPair input = { s.tail_node(), 0 };
1034 return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
1035 }
1036
1037private:
1038 float _beta;
1039};
Michalis Spyrou96f67692018-09-13 11:39:28 +01001040
/** Stack Layer */
class StackLayer final : public ILayer
{
public:
    /** Construct a stack layer that joins two or more branches along axis 0.
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Move each remaining variadic branch into the owned sub-stream list
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer that joins two or more branches along the given axis.
     *
     * @param[in] axis             Stack layer axis along which to stack the inputs
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        // Move each remaining variadic branch into the owned sub-stream list
        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer with a single input branch.
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID nid = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        // A single sub-stream needs no stack node: pass its tail node through unchanged
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and stack
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    // Skip branches whose tail is an Output node; they cannot feed another node
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams; // Owned input branches, in stacking order
    int _axis;                                            // Axis along which inputs are stacked
};
1126
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001127/** Upsample Layer */
1128class UpsampleLayer final : public ILayer
1129{
1130public:
1131 /** Construct a Upsample layer.
1132 *
1133 * @param[in] info Stride info
1134 * @param[in] upsampling_policy Upsampling policy
1135 */
1136 UpsampleLayer(Size2D info, InterpolationPolicy upsampling_policy)
1137 : _info(info), _upsampling_policy(upsampling_policy)
1138 {
1139 }
1140
1141 NodeID create_layer(IStream &s) override
1142 {
1143 NodeParams common_params = { name(), s.hints().target_hint };
1144 NodeIdxPair input = { s.tail_node(), 0 };
1145 return GraphBuilder::add_upsample_node(s.graph(), common_params, input, _info, _upsampling_policy);
1146 }
1147
1148private:
1149 Size2D _info;
1150 InterpolationPolicy _upsampling_policy;
1151};
1152
Michalis Spyrou96f67692018-09-13 11:39:28 +01001153/** YOLO Layer */
1154class YOLOLayer final : public ILayer
1155{
1156public:
1157 /** Construct a YOLO layer.
1158 *
1159 * @param[in] act_info Activation info
1160 * @param[in] num_classes Number of classes to activate
1161 */
1162 YOLOLayer(ActivationLayerInfo act_info, int32_t num_classes)
1163 : _act_info(act_info), _num_classes(num_classes)
1164 {
1165 }
1166
1167 NodeID create_layer(IStream &s) override
1168 {
1169 NodeParams common_params = { name(), s.hints().target_hint };
1170 NodeIdxPair input = { s.tail_node(), 0 };
1171 return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info, _num_classes);
1172 }
1173
1174private:
1175 ActivationLayerInfo _act_info;
1176 int32_t _num_classes;
1177};
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001178} // namespace frontend
Georgios Pinitasd9eb2752018-04-03 13:44:29 +01001179} // namespace graph
Georgios Pinitasd8734b52017-12-22 15:27:52 +00001180} // namespace arm_compute
Georgios Pinitasd9eb2752018-04-03 13:44:29 +01001181#endif /* __ARM_COMPUTE_GRAPH_LAYERS_H__ */