/*
 * Copyright (c) 2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_GRAPH2_LAYERS_H__
#define __ARM_COMPUTE_GRAPH2_LAYERS_H__

#include "arm_compute/graph2/GraphBuilder.h"
#include "arm_compute/graph2/Types.h"
#include "arm_compute/graph2/frontend/ILayer.h"
#include "arm_compute/graph2/frontend/IStream.h"
#include "arm_compute/graph2/frontend/SubStream.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/utils/misc/Utility.h"
#include "support/ToolchainSupport.h"

#include <memory>
#include <string>
#include <utility>
#include <vector>

namespace arm_compute
{
namespace graph2
{
namespace frontend
{
/** Input Layer */
class InputLayer final : public ILayer
{
public:
    /** Construct an input layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { "", s.hints().target_hint };
        return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Output Layer */
class OutputLayer final : public ILayer
{
public:
    /** Construct an output layer.
     *
     * @param[in] accessor Accessor to give output tensor data to.
     */
    OutputLayer(ITensorAccessorUPtr accessor)
        : _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
    }

private:
    ITensorAccessorUPtr _accessor;
};
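
// Example (illustrative sketch only, not part of this header): a stream is
// typically opened with an InputLayer and terminated with an OutputLayer.
// "graph" is an assumed frontend Stream; make_input_accessor() and
// make_output_accessor() are hypothetical helper factories:
//
//   graph << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), DataType::F32),
//                       make_input_accessor())
//         // ... intermediate layers ...
//         << OutputLayer(make_output_accessor());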

/** Activation Layer */
class ActivationLayer final : public ILayer
{
public:
    /** Construct an activation layer.
     *
     * @param[in] act_info Activation information.
     */
    ActivationLayer(ActivationLayerInfo act_info)
        : _act_info(act_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info);
    }

private:
    ActivationLayerInfo _act_info;
};
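
// Example (sketch): appending a ReLU activation to the stream's tail node,
// assuming a frontend Stream named "graph":
//
//   graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));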

/** Batch Normalization Layer */
class BatchNormalizationLayer final : public ILayer
{
public:
    /** Construct a batch normalization layer.
     *
     * @param[in] mean    Accessor to get mean tensor data from.
     * @param[in] var     Accessor to get variance tensor data from.
     * @param[in] gamma   (Optional) Accessor to get gamma tensor data from. Default: nullptr.
     * @param[in] beta    (Optional) Accessor to get beta tensor data from. Default: nullptr.
     * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
     */
    BatchNormalizationLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr var,
                            ITensorAccessorUPtr gamma   = nullptr,
                            ITensorAccessorUPtr beta    = nullptr,
                            float               epsilon = 0.001f)
        : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        // Mean and variance are mandatory; gamma and beta remain optional.
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_var == nullptr);

        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
                                                          std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _var;
    ITensorAccessorUPtr _gamma;
    ITensorAccessorUPtr _beta;
    float               _epsilon;
};
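
// Example (sketch): attaching batch normalization after a convolution. The
// accessor variables are assumed to be ITensorAccessorUPtr instances loaded
// elsewhere; gamma and beta may be left as nullptr:
//
//   graph << BatchNormalizationLayer(std::move(mean), std::move(var),
//                                    std::move(gamma), std::move(beta), 0.001f);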

/** Convolution Layer */
class ConvolutionLayer final : public ILayer
{
public:
    /** Construct a convolution layer.
     *
     * @param[in] conv_width  Convolution width.
     * @param[in] conv_height Convolution height.
     * @param[in] ofm         Number of output feature maps.
     * @param[in] weights     Accessor to get kernel weights from.
     * @param[in] bias        Accessor to get kernel bias from.
     * @param[in] conv_info   Padding and stride information.
     * @param[in] num_groups  (Optional) Number of groups. Default: 1.
     */
    ConvolutionLayer(unsigned int        conv_width,
                     unsigned int        conv_height,
                     unsigned int        ofm,
                     ITensorAccessorUPtr weights,
                     ITensorAccessorUPtr bias,
                     PadStrideInfo       conv_info,
                     unsigned int        num_groups = 1)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _conv_info(std::move(conv_info)),
          _num_groups(num_groups),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        // Grouping is accepted by the constructor but not yet forwarded to the builder.
        ARM_COMPUTE_UNUSED(_num_groups);
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { "", s.hints().target_hint };
        return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
                                                  Size2D(_conv_width, _conv_height), _ofm, _conv_info,
                                                  s.hints().convolution_method_hint,
                                                  std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    unsigned int        _ofm;
    const PadStrideInfo _conv_info;
    unsigned int        _num_groups;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};
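
// Example (sketch): a 3x3 convolution producing 64 output feature maps with
// stride 1 and 1-pixel padding (weights/bias accessors assumed):
//
//   graph << ConvolutionLayer(3U, 3U, 64U,
//                             std::move(weights), std::move(bias),
//                             PadStrideInfo(1, 1, 1, 1));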

/** Depthwise Convolution Layer */
class DepthwiseConvolutionLayer final : public ILayer
{
public:
    /** Construct a depthwise convolution layer.
     *
     * @param[in] conv_width  Convolution width.
     * @param[in] conv_height Convolution height.
     * @param[in] weights     Accessor to get kernel weights from.
     * @param[in] bias        Accessor to get kernel bias from.
     * @param[in] conv_info   Padding and stride information.
     */
    DepthwiseConvolutionLayer(unsigned int        conv_width,
                              unsigned int        conv_height,
                              ITensorAccessorUPtr weights,
                              ITensorAccessorUPtr bias,
                              PadStrideInfo       conv_info)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _conv_info(std::move(conv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { "", s.hints().target_hint };
        return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
                                                            input, Size2D(_conv_width, _conv_height), _conv_info,
                                                            s.hints().depthwise_convolution_method_hint,
                                                            std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    const PadStrideInfo _conv_info;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};
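
// Example (sketch): a 3x3 depthwise convolution with stride 2. No output feature
// map count is passed, since depthwise convolution preserves the channel count:
//
//   graph << DepthwiseConvolutionLayer(3U, 3U,
//                                      std::move(dw_weights), std::move(dw_bias),
//                                      PadStrideInfo(2, 2, 1, 1));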

/** Flatten Layer */
class FlattenLayer final : public ILayer
{
public:
    /** Construct a flatten layer. */
    FlattenLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
    }
};

/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs Number of outputs.
     * @param[in] weights     Accessor to get weights from.
     * @param[in] bias        Accessor to get bias from.
     */
    FullyConnectedLayer(unsigned int        num_outputs,
                        ITensorAccessorUPtr weights,
                        ITensorAccessorUPtr bias)
        : _num_outputs(num_outputs), _weights(std::move(weights)), _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                       std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _num_outputs;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};
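
// Example (sketch): a 1000-way fully connected classifier head:
//
//   graph << FullyConnectedLayer(1000U, std::move(fc_weights), std::move(fc_bias));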

/** Normalization Layer */
class NormalizationLayer final : public ILayer
{
public:
    /** Construct a normalization layer.
     *
     * @param[in] norm_info Normalization information.
     */
    NormalizationLayer(NormalizationLayerInfo norm_info)
        : _norm_info(norm_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
    }

private:
    NormalizationLayerInfo _norm_info;
};
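
// Example (sketch): cross-map local response normalization over 5 neighbours;
// the NormalizationLayerInfo arguments are assumed here to be (type, norm_size,
// alpha, beta):
//
//   graph << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f));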

/** Pooling Layer */
class PoolingLayer final : public ILayer
{
public:
    /** Construct a pooling layer.
     *
     * @param[in] pool_info Pooling information.
     */
    PoolingLayer(PoolingLayerInfo pool_info)
        : _pool_info(pool_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
    }

private:
    PoolingLayerInfo _pool_info;
};
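
// Example (sketch): 3x3 max pooling with stride 2; the single-argument
// PoolingLayerInfo(PoolingType) form performs global pooling instead:
//
//   graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)));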

/** Reshape Layer */
class ReshapeLayer final : public ILayer
{
public:
    /** Construct a reshape layer.
     *
     * @param[in] shape Target shape.
     */
    ReshapeLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};
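
// Example (sketch): collapsing a 7x7x512 feature map into a vector ahead of a
// fully connected layer:
//
//   graph << ReshapeLayer(TensorShape(7U * 7U * 512U));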

/** Softmax Layer */
class SoftmaxLayer final : public ILayer
{
public:
    /** Construct a softmax layer.
     *
     * @param[in] beta (Optional) Beta value. Default: 1.0.
     */
    SoftmaxLayer(float beta = 1.0f)
        : _beta(beta)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { "", s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
    }

private:
    float _beta;
};
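
// Example (sketch): a default softmax (beta = 1.0) as the final activation:
//
//   graph << SoftmaxLayer();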

/** Branch Layer */
class BranchLayer final : public ILayer
{
public:
    /** Construct a branch layer.
     *
     * @param[in] merge_method     Branch merging method.
     * @param[in] sub_stream1      First graph branch.
     * @param[in] sub_stream2      Second graph branch.
     * @param[in] rest_sub_streams Remaining sub-graph branches.
     */
    template <typename... Ts>
    BranchLayer(BranchMergeMethod merge_method, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _branch_merge_method(merge_method), _sub_streams()
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a branch layer with a single branch.
     *
     * @param[in] sub_stream Sub-stream.
     */
    BranchLayer(SubStream &&sub_stream)
        : _branch_merge_method(BranchMergeMethod::DEPTH_CONCATENATE), _sub_streams()
    {
        _sub_streams.push_back(arm_compute::support::cpp14::make_unique<SubStream>(std::move(sub_stream)));
    }

    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { "", s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            // A single branch needs no merging; simply forward its tail node.
            nid = _sub_streams[0]->tail_node();
        }
        else if(_branch_merge_method == BranchMergeMethod::DEPTH_CONCATENATE)
        {
            // Collect tail nodes and perform DepthConcatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_depth_concatenate_node(s.graph(), common_params, nodes);
        }
        else
        {
            // Element-wise addition supports exactly two branches.
            ARM_COMPUTE_ERROR_ON(_sub_streams.size() != 2);
            NodeIdxPair input0 = { _sub_streams[0]->tail_node(), 0 };
            NodeIdxPair input1 = { _sub_streams[1]->tail_node(), 0 };
            nid = GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, EltwiseOperation::ADD);
        }
        return nid;
    }

private:
    BranchMergeMethod                       _branch_merge_method;
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
};
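
// Example (sketch): building two parallel branches from the current stream tail
// and depth-concatenating their outputs. SubStream is assumed constructible from
// the enclosing stream, as in the frontend examples:
//
//   SubStream branch_a(graph);
//   branch_a << ConvolutionLayer(1U, 1U, 64U, std::move(w_a), std::move(b_a), PadStrideInfo(1, 1, 0, 0));
//
//   SubStream branch_b(graph);
//   branch_b << ConvolutionLayer(3U, 3U, 64U, std::move(w_b), std::move(b_b), PadStrideInfo(1, 1, 1, 1));
//
//   graph << BranchLayer(BranchMergeMethod::DEPTH_CONCATENATE, std::move(branch_a), std::move(branch_b));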
} // namespace frontend
} // namespace graph2
} // namespace arm_compute
#endif /* __ARM_COMPUTE_GRAPH2_LAYERS_H__ */