blob: 05bd483cfd942e2adc2c2abe4183f1613c237fe4 [file] [log] [blame]
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001/*
Michele Di Giorgiod9eaf612020-07-08 11:12:57 +01002 * Copyright (c) 2018-2020 Arm Limited.
Georgios Pinitasda2491f2018-06-01 17:49:09 +01003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
Michalis Spyrouf4643372019-11-29 16:17:13 +000024#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
25#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
Georgios Pinitasda2491f2018-06-01 17:49:09 +010026
27#include "arm_compute/graph/Logger.h"
28#include "arm_compute/graph/Tensor.h"
29#include "arm_compute/graph/TypePrinter.h"
30#include "arm_compute/graph/Types.h"
Georgios Pinitas9e4824c2019-04-12 13:15:58 +010031#include "arm_compute/graph/Utils.h"
giuros01acce5042019-02-21 17:32:34 +000032#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
Manuel Bottinibffb41e2019-06-20 16:00:27 +010033#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
Georgios Pinitasda2491f2018-06-01 17:49:09 +010034#include "arm_compute/graph/backends/Utils.h"
35#include "arm_compute/graph/nodes/Nodes.h"
36
37#include "arm_compute/core/Error.h"
38#include "arm_compute/core/Helpers.h"
39#include "arm_compute/core/ITensorInfo.h"
Sang-Hoon Park68dd25f2020-10-19 16:00:11 +010040#include "support/Cast.h"
Georgios Pinitasda2491f2018-06-01 17:49:09 +010041
42namespace arm_compute
43{
44namespace graph
45{
46namespace backends
47{
48namespace detail
49{
Georgios Pinitas0b192e82020-02-20 17:09:28 +000050// Address rule DR-9R5 (1579. Return by converting move constructor)
51#if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5))
52#define RETURN_UNIQUE_PTR(x) (x)
53#else /* defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5)) */
54#define RETURN_UNIQUE_PTR(x) (std::move(x))
55#endif /* defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5)) */
56
Georgios Pinitasda2491f2018-06-01 17:49:09 +010057/** Returns backing tensor of a given tensor
58 *
59 * @tparam TargetInfo Target information
60 *
61 * @param[in] tensor Tensor to extract the backing tensor from
62 *
63 * @return Backing tensor if present else nullptr
64 */
65template <typename TargetInfo>
66typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
67{
68 typename TargetInfo::TensorType *backing_tensor = nullptr;
69 if(tensor != nullptr)
70 {
71 ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
72 // Get backing tensor handle
73 ITensorHandle *tensor_handle = tensor->handle();
74 // Get backing tensor
75 backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
76 }
77
78 return backing_tensor;
79}
80
81template <typename TargetInfo>
82void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
83{
84 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
Pablo Tello32521432018-11-15 14:43:10 +000085 << " Target: " << TargetInfo::TargetType
86 << " ID: " << node.id()
87 << node.name()
Georgios Pinitasda2491f2018-06-01 17:49:09 +010088 << std::endl);
89
90 ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
91 ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
92 ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
Michalis Spyrou6bff1952019-10-02 17:22:11 +010093 ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
Georgios Pinitasda2491f2018-06-01 17:49:09 +010094}
95
96/** Creates a backend activation layer function
97 *
98 * @tparam ActivationLayerFunction Backend activation function
99 * @tparam TargetInfo Target-specific information
100 *
101 * @param[in] node Node to create the backend function for
102 *
103 * @return Backend activation layer function
104 */
105template <typename ActivationLayerFunction, typename TargetInfo>
106std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
107{
108 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
109
110 // Extract IO and info
111 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
112 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
113 const ActivationLayerInfo act_info = node.activation_info();
114
115 // Create function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000116 auto func = std::make_unique<ActivationLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100117 func->configure(input, output, act_info);
118
Pablo Tello32521432018-11-15 14:43:10 +0000119 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
120 << node.name()
121 << " Type: " << node.type()
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000122 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100123 << " Data Type: " << input->info()->data_type()
124 << " Shape: " << input->info()->tensor_shape()
125 << " Activation function: " << act_info.activation()
126 << " a: " << act_info.a()
127 << " b: " << act_info.b()
128 << " InPlace : " << is_in_place_operation(input, output)
129 << std::endl);
130
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000131 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100132}
133
thecha01e8f05da2020-08-24 17:21:41 +0100134/** Creates a backend argminmax layer function
135 *
136 * @tparam ArgMinMaxLayerFunction Backend activation function
137 * @tparam TargetInfo Target-specific information
138 *
139 * @param[in] node Node to create the backend function for
140 *
141 * @return Backend argminmax layer function
142 */
143template <typename ArgMinMaxLayerFunction, typename TargetInfo>
144std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
145{
146 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
147
148 // Extract IO and info
149 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
150 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
151 const ReductionOperation op = node.reduction_operation();
152 unsigned int axis = node.axis();
153
154 // Create function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000155 auto func = std::make_unique<ArgMinMaxLayerFunction>();
thecha01e8f05da2020-08-24 17:21:41 +0100156 func->configure(input, axis, output, op);
157
158 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
159 << node.name()
160 << " Type: " << node.type()
161 << " Target: " << TargetInfo::TargetType
162 << " Data Type: " << input->info()->data_type()
163 << " Shape: " << input->info()->tensor_shape()
164 << " Reduction Operation: " << op
165 << " axis: " << axis
166 << std::endl);
167
168 return RETURN_UNIQUE_PTR(func);
169}
170
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100171/** Create a backend batch normalization layer function
172 *
173 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
174 * @tparam TargetInfo Target-specific information
175 *
176 * @param[in] node Node to create the backend function for
177 *
178 * @return Backend batch normalization layer function
179 */
180template <typename BatchNormalizationLayerFunction, typename TargetInfo>
181std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
182{
183 validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);
184
185 // Extract IO and info
giuros01acce5042019-02-21 17:32:34 +0000186 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
187 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
188 typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(2));
189 typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(3));
190 typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));
191
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100192 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
193 const float epsilon = node.epsilon();
194 const ActivationLayerInfo fused_act = node.fused_activation();
195
196 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000197 auto func = std::make_unique<BatchNormalizationLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100198 func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);
199
200 // Log info
Pablo Tello32521432018-11-15 14:43:10 +0000201 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
202 << node.name()
203 << " Type: " << node.type()
204 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100205 << " Data Type: " << input->info()->data_type()
206 << " Shape: " << input->info()->tensor_shape()
207 << " Epsilon: " << epsilon << " "
208 << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
Pablo Tello32521432018-11-15 14:43:10 +0000209 << " InPlace: " << is_in_place_operation(input, output)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100210 << std::endl);
211
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000212 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100213}
214
giuros01acce5042019-02-21 17:32:34 +0000215/** Create a backend batch normalization layer function
216 *
217 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
218 * @tparam TargetInfo Target-specific information
219 *
220 * @param[in] node Node to create the backend function for
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000221 * @param[in] ctx Graph context
giuros01acce5042019-02-21 17:32:34 +0000222 *
223 * @return Backend batch normalization layer function
224 */
225template <typename FusedLayerTypes, typename TargetInfo>
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000226std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
giuros01acce5042019-02-21 17:32:34 +0000227{
228 validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);
229
230 // Extract IO and info
231 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
232 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
233 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
234 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(3));
235 typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(4));
236 typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(5));
237 typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(6));
238
239 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
240
241 const PadStrideInfo conv_info = node.convolution_info();
242 const unsigned int num_groups = node.num_groups();
243 const bool fast_math = node.fast_math_hint() == FastMathHint::Enabled;
244 const ActivationLayerInfo fused_act = node.fused_activation();
245 const float epsilon = node.epsilon();
246
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000247 // Create and configure function (we assume that functions have been validated before creation)
248 std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
249 std::unique_ptr<IFunction> func;
250 std::string func_name;
251
252 using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;
253
giuros01acce5042019-02-21 17:32:34 +0000254 // Create and configure function
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000255 std::tie(func, func_name) = create_named_memory_managed_function<FType>(
256 std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);
giuros01acce5042019-02-21 17:32:34 +0000257
258 // Log info
259 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
260 << node.name()
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100261 << " Type: " << node.type()
262 << " Target: " << TargetInfo::TargetType
263 << " Data Type: " << input->info()->data_type()
264 << " Input shape: " << input->info()->tensor_shape()
265 << " Weights shape: " << weights->info()->tensor_shape()
266 << " Output shape: " << output->info()->tensor_shape()
267 << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
268 << std::endl);
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000269 return RETURN_UNIQUE_PTR(func);
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100270}
271
272/** Create a backend fused depthwise convolution batch normalization layer function
273 *
274 * @tparam FusedLayerTypes Fused layer types
275 * @tparam TargetInfo Target-specific information
276 *
277 * @param[in] node Node to create the backend function for
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000278 * @param[in] ctx Graph context
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100279 *
280 * @return Backend fused depthwise convolution batch normalization layer function
281 */
282template <typename FusedLayerTypes, typename TargetInfo>
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000283std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100284{
285 validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);
286
287 // Extract IO and info
288 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
289 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
290 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
291 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(3));
292 typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(4));
293 typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(5));
294 typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(6));
295
296 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
297
298 const PadStrideInfo conv_info = node.convolution_info();
299 const unsigned int depth_multiplier = node.depth_multiplier();
300 const ActivationLayerInfo fused_act = node.fused_activation();
301 const float epsilon = node.epsilon();
302
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000303 // Create and configure function (we assume that functions have been validated before creation)
304 std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
305 std::unique_ptr<IFunction> func;
306 std::string func_name;
307
308 using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;
309
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100310 // Create and configure function
Gian Marco Iodice5dea19e2019-11-08 12:13:48 +0000311 std::tie(func, func_name) = create_named_memory_managed_function<FType>(
312 std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);
Manuel Bottinibffb41e2019-06-20 16:00:27 +0100313
314 // Log info
315 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
316 << node.name()
317 << " Type: " << node.type()
giuros01acce5042019-02-21 17:32:34 +0000318 << " Target: " << TargetInfo::TargetType
319 << " Data Type: " << input->info()->data_type()
320 << " Input shape: " << input->info()->tensor_shape()
321 << " Weights shape: " << weights->info()->tensor_shape()
322 << " Output shape: " << output->info()->tensor_shape()
323 << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
324 << std::endl);
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000325 return RETURN_UNIQUE_PTR(func);
giuros01acce5042019-02-21 17:32:34 +0000326}
327
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100328/** Create a backend bounding box transform layer function
329 *
330 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
331 * @tparam TargetInfo Target-specific information
332 *
333 * @param[in] node Node to create the backend function for
334 *
335 * @return Backend bounding box transform layer function
336 */
337template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
338std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
339{
340 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
341
342 // Extract IO and info
343 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
344 typename TargetInfo::TensorType *deltas = get_backing_tensor<TargetInfo>(node.input(1));
345 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
346 const BoundingBoxTransformInfo bbox_info = node.info();
347
348 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000349 auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100350 func->configure(input, output, deltas, bbox_info);
351
352 // Log info
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000353 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
354 << node.name()
355 << " Type: " << node.type()
356 << " Target: " << TargetInfo::TargetType
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100357 << " Data Type: " << input->info()->data_type()
358 << " Shape: " << input->info()->tensor_shape()
359 << " BoundingBox Info img W: " << bbox_info.img_width() << " "
360 << " BoundingBox Info img H: " << bbox_info.img_height() << " "
361 << std::endl);
362
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000363 return RETURN_UNIQUE_PTR(func);
Manuel Bottinid2048ce2018-10-23 17:00:42 +0100364}
365
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100366/** Create a backend channel shuffle layer function
367 *
368 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
369 * @tparam TargetInfo Target-specific information
370 *
371 * @param[in] node Node to create the backend function for
372 *
373 * @return Backend channel shuffle layer function
374 */
375template <typename ChannelShuffleLayerFunction, typename TargetInfo>
376std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
377{
378 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
379
380 // Extract IO and info
381 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
382 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
383 const unsigned int num_groups = node.num_groups();
384
385 // Create function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000386 auto func = std::make_unique<ChannelShuffleLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100387 func->configure(input, output, num_groups);
388
Pablo Tello32521432018-11-15 14:43:10 +0000389 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
390 << node.name()
391 << " Type: " << node.type()
392 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100393 << " Data Type: " << input->info()->data_type()
394 << " Shape: " << input->info()->tensor_shape()
395 << " Num groups: " << num_groups
396 << std::endl);
397
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000398 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100399}
400
Georgios Pinitase2220552018-07-20 13:23:44 +0100401/** Create a backend layer concatenate function
402 *
403 * @tparam ConcatenateLayerFunction Backend concatenate function
404 * @tparam TargetInfo Target-specific information
405 *
406 * @param[in] node Node to create the backend function for
407 *
408 * @return Backend concatenate layer function
409 */
410template <typename ConcatenateLayerFunction, typename TargetInfo>
411std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
412{
413 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
414 ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
415
416 // Return nullptr if depth concatenate is switched off
417 if(!node.is_enabled())
418 {
419 return nullptr;
420 }
421
422 // Extract IO and info
Georgios Pinitas4667ddd2020-07-13 21:21:33 +0100423 std::vector<typename TargetInfo::SrcTensorType *> inputs;
Georgios Pinitase2220552018-07-20 13:23:44 +0100424 for(unsigned int i = 0; i < node.num_inputs(); ++i)
425 {
426 inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
427 }
428 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
Georgios Pinitas9e4824c2019-04-12 13:15:58 +0100429 const DataLayout data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
430 const size_t concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());
Georgios Pinitase2220552018-07-20 13:23:44 +0100431
432 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000433 auto func = std::make_unique<ConcatenateLayerFunction>();
Georgios Pinitase2220552018-07-20 13:23:44 +0100434 func->configure(inputs, output, concat_axis);
435
436 // Log info
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000437 const bool is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
438 std::ostringstream qss;
439 if(is_quantized)
440 {
441 qss << " Output QuantInfo: " << output->info()->quantization_info();
442 }
Pablo Tello32521432018-11-15 14:43:10 +0000443 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
444 << node.name()
445 << " Type: " << node.type()
446 << " Target: " << TargetInfo::TargetType
Georgios Pinitase2220552018-07-20 13:23:44 +0100447 << " Data Type: " << output->info()->data_type()
448 << " Shape: " << output->info()->tensor_shape()
449 << " Num Inputs: " << inputs.size()
450 << " Axis: " << concat_axis
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000451 << qss.str()
Georgios Pinitase2220552018-07-20 13:23:44 +0100452 << std::endl);
453
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000454 return RETURN_UNIQUE_PTR(func);
Georgios Pinitase2220552018-07-20 13:23:44 +0100455}
456
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100457/** Create a backend convolution layer function
458 *
459 * @tparam ConvolutionLayerFunctions Backend convolution functions
460 * @tparam TargetInfo Target-specific information
461 *
462 * @param[in] node Node to create the backend function for
463 * @param[in] ctx Graph context
464 *
465 * @return Backend convolution layer function
466 */
467template <typename ConvolutionLayerFunctions, typename TargetInfo>
468std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
469{
470 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
471
472 // Extract IO and info
473 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
474 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
475 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
476 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
477
Georgios Pinitasfd7e8532018-09-07 10:51:27 +0100478 const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
479
480 if(is_quantized)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100481 {
482 biases->info()->set_data_type(DataType::S32);
483 }
484
Georgios Pinitas08346e92018-10-16 19:10:46 +0100485 const PadStrideInfo conv_info = node.convolution_info();
486 const unsigned int num_groups = node.num_groups();
487 const ConvolutionMethod conv_algorithm = node.convolution_method();
488 const bool fast_math = node.fast_math_hint() == FastMathHint::Enabled;
489 const ActivationLayerInfo fused_act = node.fused_activation();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100490
491 // Create and configure function (we assume that functions have been validated before creation)
492 std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
493 std::unique_ptr<IFunction> func;
494 std::string func_name;
495
Georgios Pinitase2220552018-07-20 13:23:44 +0100496 if(conv_algorithm == ConvolutionMethod::Winograd)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100497 {
Georgios Pinitas2a2db592018-08-15 12:14:46 +0100498 ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100499 std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
500 std::string("WinogradConvolutionLayer"), mm,
Georgios Pinitas08346e92018-10-16 19:10:46 +0100501 input, weights, biases, output, conv_info, fused_act, fast_math);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100502 }
Georgios Pinitase2220552018-07-20 13:23:44 +0100503 else if(conv_algorithm == ConvolutionMethod::Direct)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100504 {
Georgios Pinitas2a2db592018-08-15 12:14:46 +0100505 ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100506 std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
507 std::string("DirectConvolutionLayer"),
Georgios Pinitas08346e92018-10-16 19:10:46 +0100508 input, weights, biases, output, conv_info, fused_act);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100509 }
510 else if(conv_algorithm == ConvolutionMethod::GEMM)
511 {
512 std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
513 std::string("GEMMConvolutionLayer"), mm,
Georgios Pinitas2a2db592018-08-15 12:14:46 +0100514 input, weights, biases, output, conv_info,
Georgios Pinitas08346e92018-10-16 19:10:46 +0100515 WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100516 }
517 else
518 {
519 std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
520 std::string("GenericConvolutionLayer"), mm,
Georgios Pinitas2a2db592018-08-15 12:14:46 +0100521 input, weights, biases, output, conv_info,
Georgios Pinitas08346e92018-10-16 19:10:46 +0100522 WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100523 }
524
525 // Log info
Georgios Pinitasfd7e8532018-09-07 10:51:27 +0100526 std::ostringstream qss;
527 if(is_quantized)
528 {
529 qss << " Input QuantInfo: " << input->info()->quantization_info()
530 << " Weights QuantInfo: " << weights->info()->quantization_info()
531 << " Output QuantInfo: " << output->info()->quantization_info();
532 }
Pablo Tello32521432018-11-15 14:43:10 +0000533 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
534 << node.name()
535 << " Type: " << func_name
536 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100537 << " Data Type: " << input->info()->data_type()
Georgios Pinitas2a2db592018-08-15 12:14:46 +0100538 << " Groups: " << num_groups
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100539 << " Input shape: " << input->info()->tensor_shape()
540 << " Weights shape: " << weights->info()->tensor_shape()
541 << " Output shape: " << output->info()->tensor_shape()
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000542 << qss.str()
Georgios Pinitas08346e92018-10-16 19:10:46 +0100543 << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100544 << std::endl);
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000545 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100546}
547
548/** Create a backend deconvolution layer function
549 *
550 * @tparam DeconvolutionLayerFunction Backend deconvolution function
551 * @tparam TargetInfo Target-specific information
552 *
553 * @param[in] node Node to create the backend function for
554 * @param[in] ctx Graph context
555 *
556 * @return Backend deconvolution layer function
557 */
558template <typename DeconvolutionLayerFunction, typename TargetInfo>
559std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
560{
561 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
562
563 // Extract IO and info
564 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
565 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
566 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
567 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
568
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100569 const PadStrideInfo deconv_info = node.deconvolution_info();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100570
571 // Create and configure function (we assume that functions have been validated before creation)
572 std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
573 std::unique_ptr<IFunction> func;
574
575 std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
576 std::string(), mm,
Manuel Bottinic1b76fa2019-06-17 12:04:40 +0100577 input, weights, biases, output, deconv_info);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100578
579 // Log info
Pablo Tello32521432018-11-15 14:43:10 +0000580 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
581 << node.name()
582 << " Type: " << node.type()
583 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100584 << " Data Type: " << input->info()->data_type()
585 << " Input shape: " << input->info()->tensor_shape()
586 << " Weights shape: " << weights->info()->tensor_shape()
587 << " Output shape: " << output->info()->tensor_shape()
588 << std::endl);
589 return func;
590}
591
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100592/** Create a backend layer depth-wise convolution function
593 *
594 * @tparam DepthwiseConvolutionLayerFunctions Backend depthwise convolution function
595 * @tparam TargetInfo Target-specific information
596 *
597 * @param[in] node Node to create the backend function for
598 *
599 * @return Backend depth-wise convolution layer function
600 */
Manuel Bottini05069f02019-09-26 17:18:26 +0100601template <typename DepthwiseConvolutionLayer, typename TargetInfo>
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100602std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
603{
604 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
605
606 // Extract IO and info
607 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
608 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
609 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
610 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
611
Georgios Pinitasfd7e8532018-09-07 10:51:27 +0100612 const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
613
614 if(is_quantized)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100615 {
616 biases->info()->set_data_type(DataType::S32);
617 }
618
Manuel Bottini05069f02019-09-26 17:18:26 +0100619 const PadStrideInfo conv_info = node.convolution_info();
620 const unsigned int depth_multiplier = node.depth_multiplier();
621 const ActivationLayerInfo fused_act = node.fused_activation();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100622
623 // Create and configure function (we assume that functions have been validated before creation)
624 std::unique_ptr<IFunction> func;
625 std::string func_name;
Manuel Bottini05069f02019-09-26 17:18:26 +0100626
627 std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
628 std::string("DepthwiseConvolutionLayer"),
629 input, weights, biases, output, conv_info, depth_multiplier, fused_act);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100630
631 // Log info
Georgios Pinitasfd7e8532018-09-07 10:51:27 +0100632 std::ostringstream qss;
633 if(is_quantized)
634 {
635 qss << " Input QuantInfo: " << input->info()->quantization_info()
636 << " Weights QuantInfo: " << weights->info()->quantization_info()
637 << " Output QuantInfo: " << output->info()->quantization_info();
638 }
Pablo Tello32521432018-11-15 14:43:10 +0000639 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
640 << node.name()
641 << " Type: " << func_name
642 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100643 << " Data Type: " << input->info()->data_type()
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100644 << " Input shape: " << input->info()->tensor_shape()
645 << " Weights shape: " << weights->info()->tensor_shape()
646 << " Output shape: " << output->info()->tensor_shape()
Georgios Pinitas05045c12018-12-07 18:31:47 +0000647 << " Depth multiplier: " << depth_multiplier
Isabella Gottardi0ae5de92019-03-14 10:32:11 +0000648 << qss.str()
Georgios Pinitas60e98252018-10-22 16:17:20 +0100649 << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100650 << std::endl);
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000651 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100652}
653
thecha010a05e6a2020-08-28 18:40:38 +0100654/** Create a backend depth to space layer function
655 *
656 * @tparam DepthToSpaceLayerNode Function Backend depth to space function
657 * @tparam TargetInfo Target-specific information
658 *
659 * @param[in] node Node to create the backend function for
660 *
661 * @return Backend depth to space layer function
662 */
663template <typename DepthToSpaceLayerFunction, typename TargetInfo>
664std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
665{
666 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
667
668 // Extract IO and info
669 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
670 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
671
672 ARM_COMPUTE_ERROR_ON(input == nullptr);
673 ARM_COMPUTE_ERROR_ON(output == nullptr);
674
675 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000676 auto func = std::make_unique<DepthToSpaceLayerFunction>();
thecha010a05e6a2020-08-28 18:40:38 +0100677 func->configure(input, output, node.block_shape());
678
679 // Log info
680 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
681 << node.name()
682 << " Type: " << node.type()
683 << " Target: " << TargetInfo::TargetType
684 << " Data Type: " << input->info()->data_type()
685 << " Input shape: " << input->info()->tensor_shape()
686 << " Block Size: " << node.block_shape()
687 << " Output shape: " << output->info()->tensor_shape()
688 << std::endl);
689
690 return RETURN_UNIQUE_PTR(func);
691}
692
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000693/** Create a backend dequantize layer function
694 *
695 * @tparam DequantizationLayer Function Backend dequantize function
696 * @tparam TargetInfo Target-specific information
697 *
698 * @param[in] node Node to create the backend function for
699 *
700 * @return Backend dequantize layer function
701 */
702template <typename DequantizationLayerFunction, typename TargetInfo>
703std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
704{
705 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
706
707 // Extract IO and info
708 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
709 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
710
711 ARM_COMPUTE_ERROR_ON(input == nullptr);
712 ARM_COMPUTE_ERROR_ON(output == nullptr);
713
714 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000715 auto func = std::make_unique<DequantizationLayerFunction>();
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000716 func->configure(input, output);
717
718 // Log info
719 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
720 << node.name()
721 << " Type: " << node.type()
722 << " Target: " << TargetInfo::TargetType
723 << " Data Type: " << input->info()->data_type()
724 << " Input shape: " << input->info()->tensor_shape()
725 << " Input quantization info: " << output->info()->quantization_info()
726 << " Output shape: " << output->info()->tensor_shape()
727 << std::endl);
728
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000729 return RETURN_UNIQUE_PTR(func);
Isabella Gottardicd4e9ab2019-11-05 17:50:27 +0000730}
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000731/** Create a backend detection output layer function
732 *
733 * @tparam DetectionOutputLayer Function Backend detection output function
734 * @tparam TargetInfo Target-specific information
735 *
736 * @param[in] node Node to create the backend function for
737 *
738 * @return Backend detection output layer function
739 */
740template <typename DetectionOutputLayerFunction, typename TargetInfo>
741std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
742{
743 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
744
745 // Extract IO and info
746 typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
747 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
748 typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(2));
749 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
750 const DetectionOutputLayerInfo detect_info = node.detection_output_info();
751
752 ARM_COMPUTE_ERROR_ON(input0 == nullptr);
753 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
754 ARM_COMPUTE_ERROR_ON(input2 == nullptr);
755 ARM_COMPUTE_ERROR_ON(output == nullptr);
756
757 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000758 auto func = std::make_unique<DetectionOutputLayerFunction>();
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000759 func->configure(input0, input1, input2, output, detect_info);
760
761 // Log info
762 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
763 << node.name()
764 << " Type: " << node.type()
765 << " Target: " << TargetInfo::TargetType
766 << " Data Type: " << input0->info()->data_type()
767 << " Input0 shape: " << input0->info()->tensor_shape()
768 << " Input1 shape: " << input1->info()->tensor_shape()
769 << " Input2 shape: " << input2->info()->tensor_shape()
770 << " Output shape: " << output->info()->tensor_shape()
771 << " DetectionOutputLayer info: " << detect_info
772 << std::endl);
773
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000774 return RETURN_UNIQUE_PTR(func);
Isabella Gottardi7234ed82018-11-27 08:51:10 +0000775}
Isabella Gottardia7acb3c2019-01-08 13:48:44 +0000776
777/** Create a backend detection post process layer function
778 *
779 * @tparam DetectionPostProcessLayerFunction Backend detection output function
780 * @tparam TargetInfo Target-specific information
781 *
782 * @param[in] node Node to create the backend function for
783 *
784 * @return Backend detection post process layer function
785 */
786template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
787std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
788{
789 validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);
790
791 // Extract IO and info
792 typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
793 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
794 typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(2));
795 typename TargetInfo::TensorType *output0 = get_backing_tensor<TargetInfo>(node.output(0));
796 typename TargetInfo::TensorType *output1 = get_backing_tensor<TargetInfo>(node.output(1));
797 typename TargetInfo::TensorType *output2 = get_backing_tensor<TargetInfo>(node.output(2));
798 typename TargetInfo::TensorType *output3 = get_backing_tensor<TargetInfo>(node.output(3));
799 const DetectionPostProcessLayerInfo detect_info = node.detection_post_process_info();
800
801 ARM_COMPUTE_ERROR_ON(input0 == nullptr);
802 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
803 ARM_COMPUTE_ERROR_ON(input2 == nullptr);
804 ARM_COMPUTE_ERROR_ON(output0 == nullptr);
805 ARM_COMPUTE_ERROR_ON(output1 == nullptr);
806 ARM_COMPUTE_ERROR_ON(output2 == nullptr);
807 ARM_COMPUTE_ERROR_ON(output3 == nullptr);
808
809 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000810 auto func = std::make_unique<DetectionPostProcessLayerFunction>();
Isabella Gottardia7acb3c2019-01-08 13:48:44 +0000811 func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);
812
813 // Log info
814 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
815 << node.name()
816 << " Type: " << node.type()
817 << " Target: " << TargetInfo::TargetType
818 << " Data Type: " << input0->info()->data_type()
819 << " Input0 shape: " << input0->info()->tensor_shape()
820 << " Input1 shape: " << input1->info()->tensor_shape()
821 << " Input2 shape: " << input2->info()->tensor_shape()
822 << " Output0 shape: " << output0->info()->tensor_shape()
823 << " Output1 shape: " << output1->info()->tensor_shape()
824 << " Output2 shape: " << output2->info()->tensor_shape()
825 << " Output3 shape: " << output3->info()->tensor_shape()
826 << " DetectionPostProcessLayer info: " << detect_info
827 << std::endl);
828
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000829 return RETURN_UNIQUE_PTR(func);
Isabella Gottardia7acb3c2019-01-08 13:48:44 +0000830}
831
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100832/** Create a backend element-wise operation layer function
833 *
834 * @tparam EltwiseFunctions Backend element-wise function
835 * @tparam TargetInfo Target-specific information
836 *
837 * @param[in] node Node to create the backend function for
838 *
839 * @return Backend element-wise operation layer function
840 */
841template <typename EltwiseFunctions, typename TargetInfo>
842std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
843{
844 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
845
846 // Extract IO and info
847 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(0));
848 typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(1));
849 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
850 const EltwiseOperation eltwise_op = node.eltwise_operation();
851 const ConvertPolicy convert_policy = node.convert_policy();
Giorgio Arena8b2a7d32020-02-11 17:21:31 +0000852 const ActivationLayerInfo act_info = node.fused_activation();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100853 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
854 ARM_COMPUTE_ERROR_ON(input2 == nullptr);
855 ARM_COMPUTE_ERROR_ON(output == nullptr);
856
857 std::unique_ptr<IFunction> func = nullptr;
858 std::string func_name;
Georgios Pinitase2220552018-07-20 13:23:44 +0100859 if(eltwise_op == EltwiseOperation::Add)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100860 {
861 std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
862 std::string("ArithmeticAddition"),
Giorgio Arena8b2a7d32020-02-11 17:21:31 +0000863 input1, input2, output, convert_policy, act_info);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100864 }
Georgios Pinitase2220552018-07-20 13:23:44 +0100865 else if(eltwise_op == EltwiseOperation::Sub)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100866 {
867 std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
868 std::string("ArithmeticSubtraction"),
Giorgio Arena8b2a7d32020-02-11 17:21:31 +0000869 input1, input2, output, convert_policy, act_info);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100870 }
Georgios Pinitase2220552018-07-20 13:23:44 +0100871 else if(eltwise_op == EltwiseOperation::Mul)
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100872 {
873 std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
874 std::string("PixelWiseMultiplication"),
Giorgio Arena8b2a7d32020-02-11 17:21:31 +0000875 input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100876 }
thecha01f8e35842020-07-28 17:28:17 +0100877 else if(eltwise_op == EltwiseOperation::Max)
878 {
879 std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
880 std::string("ElementwiseMaximum"),
881 input1, input2, output, act_info);
882 }
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100883 else
884 {
885 ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
886 }
887
888 // Log info
Pablo Tello32521432018-11-15 14:43:10 +0000889 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
890 << node.name()
891 << " Type: " << node.type()
892 << " Target: " << TargetInfo::TargetType
893 << " Operation: " << func_name
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100894 << " Data Type: " << input1->info()->data_type()
Pablo Tello32521432018-11-15 14:43:10 +0000895 << " Shape: " << input1->info()->tensor_shape()
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100896 << std::endl);
897
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000898 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100899}
900
Sheri Zhang16dddd22020-05-27 15:03:48 +0100901/** Create a backend unary element-wise operation layer function
902 *
903 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
904 * @tparam TargetInfo Target-specific information
905 *
906 * @param[in] node Node to create the backend function for
907 *
908 * @return Backend unary element-wise operation layer function
909 */
910template <typename UnaryEltwiseFunctions, typename TargetInfo>
911std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
912{
913 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
914
915 // Extract IO and info
916 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
917 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
918 const UnaryEltwiseOperation eltwise_op = node.eltwise_descriptor().op;
919
920 ARM_COMPUTE_ERROR_ON(input == nullptr);
921 ARM_COMPUTE_ERROR_ON(output == nullptr);
922
923 std::unique_ptr<IFunction> func = nullptr;
924 std::string func_name;
925 if(eltwise_op == UnaryEltwiseOperation::Exp)
926 {
927 std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
928 std::string("Exp"),
929 input, output);
930 }
931 else
932 {
933 ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
934 }
935
936 // Log info
937 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
938 << node.name()
939 << " Type: " << node.type()
940 << " Target: " << TargetInfo::TargetType
941 << " Operation: " << func_name
942 << " Data Type: " << input->info()->data_type()
943 << " Shape: " << input->info()->tensor_shape()
944 << std::endl);
945
946 return RETURN_UNIQUE_PTR(func);
947}
948
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100949/** Create a backend flatten layer function
950 *
951 * @tparam FlattenLayerFunction Backend flatten function
952 * @tparam TargetInfo Target-specific information
953 *
954 * @param[in] node Node to create the backend function for
955 *
956 * @return Backend flatten layer function
957 */
958template <typename FlattenLayerFunction, typename TargetInfo>
959std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
960{
961 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
962
963 // Extract IO and info
964 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
965 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
966
Georgios Pinitase2220552018-07-20 13:23:44 +0100967 ARM_COMPUTE_ERROR_ON(input == nullptr);
968 ARM_COMPUTE_ERROR_ON(output == nullptr);
969
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100970 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +0000971 auto func = std::make_unique<FlattenLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100972 func->configure(input, output);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100973
974 // Log info
Pablo Tello32521432018-11-15 14:43:10 +0000975 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
976 << node.name()
977 << " Type: " << node.type()
978 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100979 << " Data Type: " << input->info()->data_type()
980 << " Input shape: " << input->info()->tensor_shape()
981 << " Output shape: " << output->info()->tensor_shape()
982 << std::endl);
983
Georgios Pinitas0b192e82020-02-20 17:09:28 +0000984 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +0100985}
986
987/** Create a backend fully connected layer function
988 *
989 * @tparam FullyConnectedLayerFunction Backend fully-connected function
990 * @tparam TargetInfo Target-specific information
991 *
992 * @param[in] node Node to create the backend function for
993 * @param[in] ctx Graph context
994 *
995 * @return Backend fully connected layer function
996 */
997template <typename FullyConnectedLayerFunction, typename TargetInfo>
998std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
999{
1000 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1001
1002 // Extract IO and info
1003 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1004 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
1005 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
1006 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
Georgios Pinitas7d66a8e2018-07-17 12:28:42 +01001007 const FullyConnectedLayerInfo fc_info = node.info();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001008
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001009 ARM_COMPUTE_ERROR_ON(input == nullptr);
1010 ARM_COMPUTE_ERROR_ON(weights == nullptr);
1011 ARM_COMPUTE_ERROR_ON(output == nullptr);
1012
Georgios Pinitase2220552018-07-20 13:23:44 +01001013 // Create and configure function
Michalis Spyrou1a569a32019-09-10 17:20:34 +01001014 auto wm = get_weights_manager(ctx, TargetInfo::TargetType);
1015 auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001016 auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
Georgios Pinitase2220552018-07-20 13:23:44 +01001017 func->configure(input, weights, biases, output, fc_info);
1018
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001019 const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
1020
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001021 // Log info
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001022 std::ostringstream qss;
1023 if(is_quantized)
1024 {
1025 qss << " Input QuantInfo: " << input->info()->quantization_info()
1026 << " Weights QuantInfo: " << weights->info()->quantization_info()
1027 << " Output QuantInfo: " << output->info()->quantization_info();
1028 }
Pablo Tello32521432018-11-15 14:43:10 +00001029 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1030 << node.name()
1031 << " Type: " << node.type()
1032 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001033 << " Data Type: " << input->info()->data_type()
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001034 << qss.str()
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001035 << " Input shape: " << input->info()->tensor_shape()
1036 << " Weights shape: " << weights->info()->tensor_shape()
1037 << " Output shape: " << output->info()->tensor_shape()
1038 << std::endl);
1039
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001040 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001041}
1042
Manuel Bottini5209be52019-02-13 16:34:56 +00001043/** Create a backend generate proposals layer function
1044 *
1045 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
1046 * @tparam TargetInfo Target-specific information
1047 *
1048 * @param[in] node Node to create the backend function for
1049 * @param[in] ctx Graph context
1050 *
1051 * @return Backend generate proposals layer function
1052 */
1053template <typename GenerateProposalsLayerFunction, typename TargetInfo>
1054std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
1055{
1056 validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);
1057
1058 // Extract IO and info
1059 typename TargetInfo::TensorType *scores = get_backing_tensor<TargetInfo>(node.input(0));
1060 typename TargetInfo::TensorType *deltas = get_backing_tensor<TargetInfo>(node.input(1));
1061 typename TargetInfo::TensorType *anchors = get_backing_tensor<TargetInfo>(node.input(2));
1062 typename TargetInfo::TensorType *proposals = get_backing_tensor<TargetInfo>(node.output(0));
1063 typename TargetInfo::TensorType *scores_out = get_backing_tensor<TargetInfo>(node.output(1));
1064 typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
1065 const GenerateProposalsInfo info = node.info();
1066
1067 ARM_COMPUTE_ERROR_ON(scores == nullptr);
1068 ARM_COMPUTE_ERROR_ON(deltas == nullptr);
1069 ARM_COMPUTE_ERROR_ON(anchors == nullptr);
1070 ARM_COMPUTE_ERROR_ON(proposals == nullptr);
1071 ARM_COMPUTE_ERROR_ON(scores_out == nullptr);
1072
1073 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001074 auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
Manuel Bottini5209be52019-02-13 16:34:56 +00001075 func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);
1076
1077 // Log info
1078 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
1079 << " Target " << TargetInfo::TargetType
1080 << " Data Type: " << scores->info()->data_type()
1081 << " Scores shape: " << scores->info()->tensor_shape()
1082 << " Deltas shape: " << deltas->info()->tensor_shape()
1083 << " Anchors shape: " << anchors->info()->tensor_shape()
1084 << " Proposals shape: " << proposals->info()->tensor_shape()
1085 << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
1086 << " Scores Out shape: " << scores_out->info()->tensor_shape()
1087 << std::endl);
1088
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001089 return RETURN_UNIQUE_PTR(func);
Manuel Bottini5209be52019-02-13 16:34:56 +00001090}
1091
thecha013603aff2020-09-01 14:52:38 +01001092/** Create a backend l2 normalization layer function
1093 *
1094 * @tparam NormalizationLayerFunction Backend normalization function
1095 * @tparam TargetInfo Target-specific information
1096 *
1097 * @param[in] node Node to create the backend function for
1098 * @param[in] ctx Graph context
1099 *
1100 * @return Backend normalization layer function
1101 */
1102template <typename L2NormalizeLayerFunction, typename TargetInfo>
1103std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
1104{
1105 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1106
1107 // Extract IO and info
1108 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1109 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1110 int axis = node.axis();
1111 float epsilon = node.epsilon();
1112
1113 ARM_COMPUTE_ERROR_ON(input == nullptr);
1114 ARM_COMPUTE_ERROR_ON(output == nullptr);
1115
1116 // Create and configure function
1117 auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001118 auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
thecha013603aff2020-09-01 14:52:38 +01001119 func->configure(input, output, axis, epsilon);
1120
1121 // Log info
1122 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1123 << node.name()
1124 << " Type: " << node.type()
1125 << " Target: " << TargetInfo::TargetType
1126 << " Data Type: " << input->info()->data_type()
1127 << " Input shape: " << input->info()->tensor_shape()
1128 << " Output shape: " << output->info()->tensor_shape()
1129 << " Axis: " << axis
1130 << " Epsilon: " << epsilon
1131 << std::endl);
1132
1133 return RETURN_UNIQUE_PTR(func);
1134}
1135
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001136/** Create a backend normalization layer function
1137 *
1138 * @tparam NormalizationLayerFunction Backend normalization function
1139 * @tparam TargetInfo Target-specific information
1140 *
1141 * @param[in] node Node to create the backend function for
1142 * @param[in] ctx Graph context
1143 *
1144 * @return Backend normalization layer function
1145 */
1146template <typename NormalizationLayerFunction, typename TargetInfo>
1147std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
1148{
1149 ARM_COMPUTE_UNUSED(ctx);
1150
1151 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1152
1153 // Extract IO and info
1154 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1155 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1156 const NormalizationLayerInfo norm_info = node.normalization_info();
1157 ARM_COMPUTE_ERROR_ON(input == nullptr);
1158 ARM_COMPUTE_ERROR_ON(output == nullptr);
1159
1160 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001161 auto func = std::make_unique<NormalizationLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001162 func->configure(input, output, norm_info);
1163
1164 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001165 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1166 << node.name()
1167 << " Type: " << node.type()
1168 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001169 << " Data Type: " << input->info()->data_type()
1170 << " Input shape: " << input->info()->tensor_shape()
1171 << " Output shape: " << output->info()->tensor_shape()
1172 << " Normalization info: " << norm_info.type()
1173 << std::endl);
1174
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001175 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001176}
1177
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001178/** Create a backend normalize planar YUV layer function
1179 *
1180 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
1181 * @tparam TargetInfo Target-specific information
1182 *
1183 * @param[in] node Node to create the backend function for
1184 *
1185 * @return Backend normalize plnar YUV layer function
1186 */
1187template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
1188std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
1189{
1190 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1191
1192 // Extract IO and info
1193 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1194 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
1195 typename TargetInfo::TensorType *std = get_backing_tensor<TargetInfo>(node.input(2));
1196 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1197 ARM_COMPUTE_ERROR_ON(input == nullptr);
1198 ARM_COMPUTE_ERROR_ON(mean == nullptr);
1199 ARM_COMPUTE_ERROR_ON(std == nullptr);
1200 ARM_COMPUTE_ERROR_ON(output == nullptr);
1201
1202 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001203 auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001204 func->configure(input, output, mean, std);
1205
1206 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001207 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1208 << node.name()
1209 << " Type: " << node.type()
1210 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001211 << " Data Type: " << input->info()->data_type()
1212 << " Shape: " << input->info()->tensor_shape()
1213 << std::endl);
1214
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001215 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001216}
1217
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001218/** Create a backend pad layer function
1219 *
1220 * @tparam PadLayerFunction Backend pad function
1221 * @tparam TargetInfo Target-specific information
1222 *
1223 * @param[in] node Node to create the backend function for
1224 *
1225 * @return Backend pad layer function
1226 */
1227template <typename PadLayerFunction, typename TargetInfo>
1228std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
1229{
1230 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1231
1232 // Extract IO and info
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001233 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1234 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1235 const PaddingList &padding = node.padding();
1236 const PixelValue pad_value = node.pad_value();
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001237 ARM_COMPUTE_ERROR_ON(input == nullptr);
1238 ARM_COMPUTE_ERROR_ON(output == nullptr);
1239
1240 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001241 auto func = std::make_unique<PadLayerFunction>();
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001242 func->configure(input, output, padding, pad_value);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001243
1244 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001245 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1246 << node.name()
1247 << " Type: " << node.type()
1248 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001249 << " Data Type: " << input->info()->data_type()
1250 << " Input shape: " << input->info()->tensor_shape()
1251 << " Output shape: " << output->info()->tensor_shape()
1252 << std::endl);
1253
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001254 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001255}
1256
Georgios Pinitas57c48242018-08-02 13:41:49 +01001257/** Create a backend permute layer function
1258 *
1259 * @tparam PermuteLayerFunction Backend permute function
1260 * @tparam TargetInfo Target-specific information
1261 *
1262 * @param[in] node Node to create the backend function for
1263 *
1264 * @return Backend permute layer function
1265 */
1266template <typename PermuteLayerFunction, typename TargetInfo>
1267std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
1268{
1269 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1270
1271 // Extract IO and info
1272 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1273 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1274 const PermutationVector &perm = node.permutation_vector();
1275 ARM_COMPUTE_ERROR_ON(input == nullptr);
1276 ARM_COMPUTE_ERROR_ON(output == nullptr);
1277
1278 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001279 auto func = std::make_unique<PermuteLayerFunction>();
Georgios Pinitas57c48242018-08-02 13:41:49 +01001280 func->configure(input, output, perm);
1281
1282 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001283 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1284 << node.name()
1285 << " Type: " << node.type()
1286 << " Target: " << TargetInfo::TargetType
Georgios Pinitas57c48242018-08-02 13:41:49 +01001287 << " Data Type: " << input->info()->data_type()
1288 << " Input shape: " << input->info()->tensor_shape()
1289 << " Output shape: " << output->info()->tensor_shape()
1290 << " Permutation vector: " << perm
1291 << std::endl);
1292
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001293 return RETURN_UNIQUE_PTR(func);
Georgios Pinitas57c48242018-08-02 13:41:49 +01001294}
1295
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001296/** Create a backend pooling layer function
1297 *
1298 * @tparam PoolingLayerFunction Backend pooling function
1299 * @tparam TargetInfo Target-specific information
1300 *
1301 * @param[in] node Node to create the backend function for
1302 *
1303 * @return Backend pooling layer function
1304 */
1305template <typename PoolingLayerFunction, typename TargetInfo>
1306std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
1307{
1308 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1309
1310 // Extract IO and info
1311 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1312 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1313 const PoolingLayerInfo pool_info = node.pooling_info();
1314 ARM_COMPUTE_ERROR_ON(input == nullptr);
1315 ARM_COMPUTE_ERROR_ON(output == nullptr);
1316
1317 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001318 auto func = std::make_unique<PoolingLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001319 func->configure(input, output, pool_info);
1320
1321 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001322 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1323 << node.name()
1324 << " Type: " << node.type()
1325 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001326 << " Data Type: " << input->info()->data_type()
1327 << " Input shape: " << input->info()->tensor_shape()
1328 << " Output shape: " << output->info()->tensor_shape()
Sang-Hoon Park0cb3da62020-01-15 12:39:56 +00001329 << " Pooling info: " << pool_info.pool_type
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001330 << std::endl);
1331
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001332 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001333}
1334
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001335/** Create a backend PRelu layer function
1336 *
1337 * @tparam PReluFunction Backend PRelu function
1338 * @tparam TargetInfo Target-specific information
1339 *
1340 * @param[in] node Node to create the backend function for
1341 *
1342 * @return Backend PRelu layer function
1343 */
1344template <typename PReluFunction, typename TargetInfo>
1345std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
1346{
1347 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1348
1349 // Extract IO and info
1350 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1351 typename TargetInfo::TensorType *alpha = get_backing_tensor<TargetInfo>(node.input(1));
1352 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1353 ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
1354 ARM_COMPUTE_ERROR_ON(output == nullptr);
1355
1356 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001357 auto func = std::make_unique<PReluFunction>();
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001358 func->configure(input, alpha, output);
1359
1360 // Log info
1361 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1362 << node.name()
1363 << " Type: " << node.type()
1364 << " Target: " << TargetInfo::TargetType
1365 << " Data Type: " << input->info()->data_type()
1366 << " Input shape: " << input->info()->tensor_shape()
1367 << " Output shape: " << output->info()->tensor_shape()
1368 << std::endl);
1369
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001370 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001371}
1372
Giorgio Arena6e9d0e02020-01-03 15:02:04 +00001373/** Create a backend print layer function
1374 *
1375 * @tparam TargetInfo Target-specific information
1376 *
1377 * @param[in] node Node to create the backend function for
1378 *
1379 * @return Backend print layer function
1380 */
1381template <typename TargetInfo>
1382std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
1383{
1384 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1385
1386 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1387 ARM_COMPUTE_ERROR_ON(input == nullptr);
1388 ARM_COMPUTE_UNUSED(input);
1389
1390 // Log info
1391 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1392 << node.name()
1393 << " Type: " << node.type()
1394 << " Target: " << TargetInfo::TargetType
1395 << " Data Type: " << input->info()->data_type()
1396 << " Input shape: " << input->info()->tensor_shape()
1397 << std::endl);
1398
1399 return nullptr;
1400}
1401
Pablo Tello32521432018-11-15 14:43:10 +00001402/** Create a backend priorbox layer function
1403 *
1404 * @tparam PriorBoxLayerFunction Backend priorbox function
1405 * @tparam TargetInfo Target-specific information
1406 *
1407 * @param[in] node Node to create the backend function for
1408 *
1409 * @return Backend priorbox layer function
1410 */
1411template <typename PriorBoxLayerFunction, typename TargetInfo>
1412std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
1413{
1414 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1415
1416 // Extract IO and info
1417 typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
1418 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
1419 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1420 const PriorBoxLayerInfo prior_info = node.priorbox_info();
1421 ARM_COMPUTE_ERROR_ON(input0 == nullptr);
1422 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
1423 ARM_COMPUTE_ERROR_ON(output == nullptr);
1424
1425 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001426 auto func = std::make_unique<PriorBoxLayerFunction>();
Pablo Tello32521432018-11-15 14:43:10 +00001427 func->configure(input0, input1, output, prior_info);
1428
1429 // Log info
1430 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1431 << node.name()
1432 << " Type: " << node.type()
1433 << " Target: " << TargetInfo::TargetType
1434 << " Data Type: " << input0->info()->data_type()
1435 << " Input0 shape: " << input0->info()->tensor_shape()
1436 << " Input1 shape: " << input1->info()->tensor_shape()
1437 << " Output shape: " << output->info()->tensor_shape()
1438 << " PriorBoxLayer info: " << prior_info
1439 << std::endl);
1440
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001441 return RETURN_UNIQUE_PTR(func);
Pablo Tello32521432018-11-15 14:43:10 +00001442}
1443
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001444/** Create a backend quantization layer function
1445 *
1446 * @tparam QuantizationLayerFunction Backend quantization function
1447 * @tparam TargetInfo Target-specific information
1448 *
1449 * @param[in] node Node to create the backend function for
1450 *
1451 * @return Backend quantization layer function
1452 */
1453template <typename QuantizationLayerFunction, typename TargetInfo>
1454std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
1455{
1456 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1457
1458 // Extract IO and info
1459 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1460 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1461 ARM_COMPUTE_ERROR_ON(input == nullptr);
1462 ARM_COMPUTE_ERROR_ON(output == nullptr);
1463
1464 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001465 auto func = std::make_unique<QuantizationLayerFunction>();
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001466 func->configure(input, output);
1467
1468 // Log info
1469 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1470 << node.name()
1471 << " Type: " << node.type()
1472 << " Target: " << TargetInfo::TargetType
1473 << " Data Type: " << input->info()->data_type()
1474 << " Input shape: " << input->info()->tensor_shape()
1475 << " Output shape: " << output->info()->tensor_shape()
1476 << std::endl);
1477
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001478 return RETURN_UNIQUE_PTR(func);
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001479}
1480
thecha01d64444b2020-09-07 14:50:21 +01001481/** Create a backend reduction operation layer function
1482 *
1483 * @tparam ReductionOperationFunction Backend reduction operation function
1484 * @tparam TargetInfo Target-specific information
1485 *
1486 * @param[in] node Node to create the backend function for
1487 * @param[in] ctx Graph context
1488 *
1489 * @return Backend reduction sum layer function
1490 */
1491template <typename ReductionOperationFunction, typename TargetInfo>
1492std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
1493{
1494 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1495
1496 // Extract IO and info
1497 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1498 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1499 ReductionOperation op = node.op();
1500 int axis = node.axis();
1501 bool keep_dims = node.keep_dims();
1502 ARM_COMPUTE_ERROR_ON(input == nullptr);
1503 ARM_COMPUTE_ERROR_ON(output == nullptr);
1504
1505 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001506 auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
thecha01d64444b2020-09-07 14:50:21 +01001507 func->configure(input, output, axis, op, keep_dims);
1508
1509 // Log info
1510 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1511 << node.name()
1512 << " Type: " << node.type()
1513 << " Target: " << TargetInfo::TargetType
1514 << " Data Type: " << input->info()->data_type()
1515 << " Input shape: " << input->info()->tensor_shape()
1516 << " Output shape: " << output->info()->tensor_shape()
1517 << " Operation: " << op
1518 << " Axis: " << axis
1519 << " Keep dimensions:" << keep_dims
1520 << std::endl);
1521
1522 return RETURN_UNIQUE_PTR(func);
1523}
1524
/** Create a backend reorg layer function
 *
 * @tparam ReorgLayerFunction Backend reorg function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reorg layer function
 */
template <typename ReorgLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function; the reorg stride comes straight from the node
    auto func = std::make_unique<ReorgLayerFunction>();
    func->configure(input, output, node.stride());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
1561
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001562/** Create a backend reshape layer function
1563 *
1564 * @tparam ReshapeLayerFunction Backend reshape function
1565 * @tparam TargetInfo Target-specific information
1566 *
1567 * @param[in] node Node to create the backend function for
1568 *
1569 * @return Backend reshape layer function
1570 */
1571template <typename ReshapeLayerFunction, typename TargetInfo>
1572std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
1573{
1574 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1575
1576 // Extract IO and info
1577 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1578 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1579 ARM_COMPUTE_ERROR_ON(input == nullptr);
1580 ARM_COMPUTE_ERROR_ON(output == nullptr);
1581
1582 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001583 auto func = std::make_unique<ReshapeLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001584 func->configure(input, output);
1585
1586 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001587 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1588 << node.name()
1589 << " Type: " << node.type()
1590 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001591 << " Data Type: " << input->info()->data_type()
1592 << " Input shape: " << input->info()->tensor_shape()
1593 << " Output shape: " << output->info()->tensor_shape()
1594 << std::endl);
1595
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001596 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001597}
1598
1599/** Create a backend resize layer function
1600 *
1601 * @tparam ResizeLayerFunction Backend resize function
1602 * @tparam TargetInfo Target-specific information
1603 *
1604 * @param[in] node Node to create the backend function for
1605 *
1606 * @return Backend resize layer function
1607 */
1608template <typename ResizeLayerFunction, typename TargetInfo>
1609std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
1610{
1611 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1612
1613 // Extract IO and info
1614 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1615 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1616 ARM_COMPUTE_ERROR_ON(input == nullptr);
1617 ARM_COMPUTE_ERROR_ON(output == nullptr);
1618 const InterpolationPolicy policy = node.policy();
1619
1620 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001621 auto func = std::make_unique<ResizeLayerFunction>();
Sang-Hoon Parkccd94962020-06-09 12:09:24 +01001622 func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT });
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001623
1624 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001625 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1626 << node.name()
1627 << " Type: " << node.type()
1628 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001629 << " Data Type: " << input->info()->data_type()
1630 << " Input shape: " << input->info()->tensor_shape()
1631 << " Output shape: " << output->info()->tensor_shape()
1632 << " Interpolation: " << policy
1633 << std::endl);
1634
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001635 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001636}
1637
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001638/** Create a backend ROI align layer function
1639 *
1640 * @tparam ROIAlignLayerFunction ROI Align function
1641 * @tparam TargetInfo Target-specific information
1642 *
1643 * @param[in] node Node to create the backend function for
1644 *
1645 * @return ROI Align layer function
1646 */
1647template <typename ROIAlignLayerFunction, typename TargetInfo>
1648std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
1649{
1650 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1651
1652 // Extract IO and info
1653 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1654 typename TargetInfo::TensorType *rois = get_backing_tensor<TargetInfo>(node.input(1));
1655 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1656 ARM_COMPUTE_ERROR_ON(input == nullptr);
1657 ARM_COMPUTE_ERROR_ON(output == nullptr);
1658 ARM_COMPUTE_ERROR_ON(rois == nullptr);
1659
1660 const ROIPoolingLayerInfo pool_info = node.pooling_info();
1661
1662 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001663 auto func = std::make_unique<ROIAlignLayerFunction>();
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001664
1665 func->configure(input, rois, output, pool_info);
1666
1667 // Log info
Isabella Gottardi0ae5de92019-03-14 10:32:11 +00001668 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1669 << node.name()
1670 << " Type: " << node.type()
1671 << " Target: " << TargetInfo::TargetType
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001672 << " Data Type: " << input->info()->data_type()
1673 << " Input shape: " << input->info()->tensor_shape()
1674 << " Output shape: " << output->info()->tensor_shape()
1675 << " ROIs shape: " << rois->info()->tensor_shape()
1676 << " ROIPooling width: " << pool_info.pooled_width()
1677 << " ROIPooling height: " << pool_info.pooled_height()
1678 << std::endl);
1679
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001680 return RETURN_UNIQUE_PTR(func);
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001681}
1682
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001683/** Create a backend slice layer function
1684 *
1685 * @tparam SliceLayerFunction Backend slice function
1686 * @tparam TargetInfo Target-specific information
1687 *
1688 * @param[in] node Node to create the backend function for
1689 *
1690 * @return Backend slice layer function
1691 */
1692template <typename SliceLayerFunction, typename TargetInfo>
1693std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
1694{
1695 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1696
1697 // Extract IO and info
1698 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1699 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1700 ARM_COMPUTE_ERROR_ON(input == nullptr);
1701 ARM_COMPUTE_ERROR_ON(output == nullptr);
1702
1703 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001704 auto func = std::make_unique<SliceLayerFunction>();
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001705 func->configure(input, output, node.starts(), node.ends());
1706
1707 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001708 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1709 << node.name()
1710 << " Type: " << node.type()
1711 << " Target: " << TargetInfo::TargetType
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001712 << " Data Type: " << input->info()->data_type()
1713 << " Input shape: " << input->info()->tensor_shape()
1714 << " Output shape: " << output->info()->tensor_shape()
1715 << std::endl);
1716
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001717 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001718}
1719
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001720/** Create a backend softmax layer function
1721 *
1722 * @tparam SoftmaxLayerFunction Backend softmax function
1723 * @tparam TargetInfo Target-specific information
1724 *
1725 * @param[in] node Node to create the backend function for
1726 * @param[in] ctx Graph context
1727 *
1728 * @return Backend softmax layer function
1729 */
1730template <typename SoftmaxLayerFunction, typename TargetInfo>
1731std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
1732{
1733 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1734
1735 // Extract IO and info
1736 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1737 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1738 const float beta = node.beta();
1739 ARM_COMPUTE_ERROR_ON(input == nullptr);
1740 ARM_COMPUTE_ERROR_ON(output == nullptr);
1741
1742 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001743 auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001744 func->configure(input, output, beta);
1745
1746 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001747 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1748 << node.name()
1749 << " Type: " << node.type()
1750 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001751 << " Data Type: " << input->info()->data_type()
1752 << " Input shape: " << input->info()->tensor_shape()
1753 << " Output shape: " << output->info()->tensor_shape()
1754 << std::endl);
1755
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001756 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001757}
Michele Di Giorgioec699752019-03-22 15:25:32 +00001758
1759/** Create a backend layer stack function
1760 *
1761 * @tparam StackLayerFunction Backend stack function
1762 * @tparam TargetInfo Target-specific information
1763 *
1764 * @param[in] node Node to create the backend function for
1765 *
1766 * @return Backend stack layer function
1767 */
1768template <typename StackLayerFunction, typename TargetInfo>
1769std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
1770{
1771 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
1772 ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
1773
1774 // Extract IO and info
1775 std::vector<typename TargetInfo::TensorType *> inputs;
1776 for(unsigned int i = 0; i < node.num_inputs(); ++i)
1777 {
1778 inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
1779 }
1780 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1781 const int axis = node.axis();
1782
1783 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001784 auto func = std::make_unique<StackLayerFunction>();
Michele Di Giorgioec699752019-03-22 15:25:32 +00001785 func->configure(inputs, axis, output);
1786
1787 // Log info
1788 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1789 << node.name()
1790 << " Type: " << node.type()
1791 << " Target: " << TargetInfo::TargetType
1792 << " Data Type: " << output->info()->data_type()
1793 << " Inputs shape: " << inputs[0]->info()->tensor_shape()
1794 << " Output shape: " << output->info()->tensor_shape()
1795 << " Num Inputs: " << inputs.size()
1796 << " Axis: " << axis
1797 << std::endl);
1798
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001799 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgioec699752019-03-22 15:25:32 +00001800}
thecha012bfadd92020-08-12 17:25:51 +01001801
/** Create a backend strided slice layer function
 *
 * @tparam StridedSliceLayerFunction Backend strided slice function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend strided slice layer function
 */
template <typename StridedSliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    Coordinates starts  = node.starts();
    Coordinates ends    = node.ends();
    BiStrides   strides = node.strides();
    StridedSliceLayerInfo info = node.strided_slice_info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function; the bit masks select which dimensions the begin/end/shrink semantics apply to
    auto func = std::make_unique<StridedSliceLayerFunction>();
    func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
1843
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001844/** Create a backend Upsample layer function
1845 *
1846 * @tparam UpsampleLayerFunction Backend Upsample function
1847 * @tparam TargetInfo Target-specific information
1848 *
1849 * @param[in] node Node to create the backend function for
1850 * @param[in] ctx Graph context
1851 *
1852 * @return Backend Upsample layer function
1853 */
1854template <typename UpsampleLayerFunction, typename TargetInfo>
1855std::unique_ptr<IFunction> create_upsample_layer(UpsampleLayerNode &node, GraphContext &ctx)
1856{
Michalis Spyrou6bff1952019-10-02 17:22:11 +01001857 ARM_COMPUTE_UNUSED(ctx);
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001858 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1859
1860 // Extract IO and info
1861 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1862 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1863 const Size2D info = node.info();
1864 const InterpolationPolicy upsampling_policy = node.upsampling_policy();
1865 ARM_COMPUTE_ERROR_ON(upsampling_policy != InterpolationPolicy::NEAREST_NEIGHBOR);
1866 ARM_COMPUTE_ERROR_ON(info.x() != 2 || info.y() != 2);
1867 ARM_COMPUTE_ERROR_ON(input == nullptr);
1868 ARM_COMPUTE_ERROR_ON(output == nullptr);
1869
1870 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001871 auto func = std::make_unique<UpsampleLayerFunction>();
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001872 func->configure(input, output, info, upsampling_policy);
1873
1874 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001875 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1876 << node.name()
1877 << " Type: " << node.type()
1878 << " Target: " << TargetInfo::TargetType
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001879 << " Data Type: " << input->info()->data_type()
1880 << " Input shape: " << input->info()->tensor_shape()
1881 << " Output shape: " << output->info()->tensor_shape()
1882 << " Strides: " << info
1883 << " Upsampling policy: " << upsampling_policy
1884 << std::endl);
1885
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001886 return RETURN_UNIQUE_PTR(func);
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001887}
/** Create a backend YOLO layer function
 *
 * @tparam YOLOlayerFunction Backend YOLO function
 * @tparam TargetInfo        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context (unused)
 *
 * @return Backend YOLO layer function
 */
template <typename YOLOlayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_yolo_layer(YOLOLayerNode &node, GraphContext &ctx)
{
    ARM_COMPUTE_UNUSED(ctx);
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo act_info    = node.activation_info();
    const int32_t             num_classes = node.num_classes();
    ARM_COMPUTE_ERROR_ON(num_classes <= 0);
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<YOLOlayerFunction>();
    func->configure(input, output, act_info, num_classes);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " Num classes: " << num_classes
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001931} // namespace detail
1932} // namespace backends
1933} // namespace graph
1934} // namespace arm_compute
1935
Michalis Spyrouf4643372019-11-29 16:17:13 +00001936#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */