/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H

#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Tensor.h"
#include "arm_compute/graph/TypePrinter.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensorInfo.h"
#include "support/Cast.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
namespace detail
{
/** Returns backing tensor of a given tensor
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] tensor Tensor to extract the backing tensor from
 *
 * @return Backing tensor if present else nullptr
 */
template <typename TargetInfo>
typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
{
    typename TargetInfo::TensorType *backing_tensor = nullptr;
    if(tensor != nullptr)
    {
        ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
        // Get backing tensor handle
        ITensorHandle *tensor_handle = tensor->handle();
        // Get backing tensor
        backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
    }

    return backing_tensor;
}
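
// Illustrative sketch (not part of the library): the TargetInfo trait used
// throughout this file only needs to expose the backing tensor type and the
// target it serves. A hypothetical backend could define it as:
//
//     struct MyTargetInfo
//     {
//         using TensorType    = arm_compute::ITensor; // backend tensor type (assumption)
//         using SrcTensorType = arm_compute::ITensor;
//         static constexpr Target TargetType = Target::NEON;
//     };
//
//     auto *backing = get_backing_tensor<MyTargetInfo>(graph_tensor); // nullptr-safe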

template <typename TargetInfo>
void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
                                  << " Target: " << TargetInfo::TargetType
                                  << " ID: " << node.id()
                                  << node.name()
                                  << std::endl);

    ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
    ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
    ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
}

/** Creates a backend activation layer function
 *
 * @tparam ActivationLayerFunction Backend activation function
 * @tparam TargetInfo              Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend activation layer function
 */
template <typename ActivationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input    = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output   = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo        act_info = node.activation_info();

    // Create function
    auto func = std::make_unique<ActivationLayerFunction>();
    func->configure(input, output, act_info);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " a: " << act_info.a()
                               << " b: " << act_info.b()
                               << " InPlace : " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}
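
// Usage sketch (hypothetical backend, names assumed for illustration): a
// backend function factory typically instantiates these helpers with its own
// function and target types when visiting a node, e.g.:
//
//     case NodeType::ActivationLayer:
//         return detail::create_activation_layer<MyActivationLayer, MyTargetInfo>(
//             *arm_compute::utils::cast::polymorphic_downcast<ActivationLayerNode *>(node));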

/** Creates a backend argminmax layer function
 *
 * @tparam ArgMinMaxLayerFunction Backend arg min max function
 * @tparam TargetInfo             Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend argminmax layer function
 */
template <typename ArgMinMaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ReductionOperation         op     = node.reduction_operation();
    unsigned int                     axis   = node.axis();

    // Create function
    auto func = std::make_unique<ArgMinMaxLayerFunction>();
    func->configure(input, axis, output, op);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Reduction Operation: " << op
                               << " axis: " << axis
                               << std::endl);

    return std::move(func);
}

/** Create a backend batch normalization layer function
 *
 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend batch normalization layer function
 */
template <typename BatchNormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *var   = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *beta  = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));

    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      epsilon   = node.epsilon();
    const ActivationLayerInfo        fused_act = node.fused_activation();

    // Create and configure function
    auto func = std::make_unique<BatchNormalizationLayerFunction>();
    func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Epsilon: " << epsilon << " "
                               << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}
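
// For reference, the function configured above applies the standard
// batch normalization transform per channel:
//
//     out = gamma * (in - mean) / sqrt(var + epsilon) + beta
//
// followed by the fused activation when fused_act.enabled() is true.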

/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
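
// Note: the fusion above exploits the fact that batch normalization is an
// affine transform at inference time, so it can be folded into the
// convolution parameters (standard folding, handled by the backend function):
//
//     w_fused = w * gamma / sqrt(var + epsilon)
//     b_fused = (b - mean) * gamma / sqrt(var + epsilon) + beta
//
// which is why the node carries the BN statistics alongside the weights and biases.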

/** Create a backend fused depthwise convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused depthwise convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();
    const float               epsilon          = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend bounding box transform layer function
 *
 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend bounding box transform layer function
 */
template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas    = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const BoundingBoxTransformInfo   bbox_info = node.info();

    // Create and configure function
    auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
    func->configure(input, output, deltas, bbox_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " BoundingBox Info img W: " << bbox_info.img_width() << " "
                               << " BoundingBox Info img H: " << bbox_info.img_height() << " "
                               << std::endl);

    return std::move(func);
}

/** Create a backend channel shuffle layer function
 *
 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend channel shuffle layer function
 */
template <typename ChannelShuffleLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const unsigned int               num_groups = node.num_groups();

    // Create function
    auto func = std::make_unique<ChannelShuffleLayerFunction>();
    func->configure(input, output, num_groups);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Num groups: " << num_groups
                               << std::endl);

    return std::move(func);
}

/** Create a backend layer concatenate function
 *
 * @tparam ConcatenateLayerFunction Backend concatenate function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend concatenate layer function
 */
template <typename ConcatenateLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Return nullptr if depth concatenate is switched off
    if(!node.is_enabled())
    {
        return nullptr;
    }

    // Extract IO and info
    std::vector<typename TargetInfo::SrcTensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DataLayout                 data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
    const size_t                     concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());

    // Create and configure function
    auto func = std::make_unique<ConcatenateLayerFunction>();
    func->configure(inputs, output, concat_axis);

    // Log info
    const bool         is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << concat_axis
                               << qss.str()
                               << std::endl);

    return std::move(func);
}
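
// Note on the axis conversion above: node.concatenation_axis() is a
// layout-agnostic DataLayoutDimension that get_dimension_idx() resolves
// against the output's data layout. Illustrative values (assuming the
// library's usual dimension ordering):
//
//     get_dimension_idx(DataLayout::NCHW, DataLayoutDimension::CHANNEL); // -> 2
//     get_dimension_idx(DataLayout::NHWC, DataLayoutDimension::CHANNEL); // -> 0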

/** Create a backend convolution layer function
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info      = node.convolution_info();
    const unsigned int        num_groups     = node.num_groups();
    const ConvolutionMethod   conv_algorithm = node.convolution_method();
    const bool                fast_math      = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act      = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    if(conv_algorithm == ConvolutionMethod::Winograd)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
            std::string("WinogradConvolutionLayer"), mm,
            input, weights, biases, output, conv_info, fused_act, fast_math);
    }
    else if(conv_algorithm == ConvolutionMethod::Direct)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
            std::string("DirectConvolutionLayer"),
            input, weights, biases, output, conv_info, fused_act);
    }
    else if(conv_algorithm == ConvolutionMethod::GEMM)
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
            std::string("GEMMConvolutionLayer"), mm,
            input, weights, biases, output, conv_info,
            WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
    }
    else
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
            std::string("GenericConvolutionLayer"), mm,
            input, weights, biases, output, conv_info,
            WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
    }

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
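
// Illustrative sketch (assumed naming, not a definitive backend): the
// ConvolutionLayerFunctions template argument is expected to group the four
// method-specific functions dispatched on above, e.g.:
//
//     struct MyConvolutionLayerFunctions
//     {
//         using GenericConvolutionLayer  = MyConvolutionLayer;
//         using GEMMConvolutionLayer     = MyGEMMConvolutionLayer;
//         using DirectConvolutionLayer   = MyDirectConvolutionLayer;
//         using WinogradConvolutionLayer = MyWinogradConvolutionLayer;
//     };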

/** Create a backend convolution layer function with post operators
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_with_post_op(FusedConvolutionWithPostOpNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 4 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const ActivationLayerInfo fused_act  = node.fused_activation();

    experimental::PostOpList<typename TargetInfo::TensorType *> post_ops;

    auto &post_op_info_list = node.post_op_info_list();
    for(const auto &post_op_info : post_op_info_list)
    {
        switch(post_op_info->type())
        {
            case PostOpType::Activation:
            {
                const auto act_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoActivation *>(post_op_info.get());
                post_ops.template push_back_op<experimental::PostOpAct<typename TargetInfo::TensorType *>>(act_info->_act);
                break;
            }
            case PostOpType::Eltwise_Add:
            {
                typename TargetInfo::TensorType *add_input    = get_backing_tensor<TargetInfo>(node.input(3));
                const auto                       eltwise_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoEltwiseAdd *>(post_op_info.get());
                post_ops.template push_back_op<experimental::PostOpEltwiseAdd<typename TargetInfo::TensorType *>>(add_input, eltwise_info->_prev_op_dst_pos, eltwise_info->_policy);
                break;
            }
            default:
            {
                ARM_COMPUTE_ERROR("Unsupported PostOpType");
            }
        }
    }

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
        std::string("GEMMConvolutionLayer"), mm,
        input, weights, biases, output, conv_info,
        WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups, post_ops);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend deconvolution layer function
 *
 * @tparam DeconvolutionLayerFunction Backend deconvolution function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend deconvolution layer function
 */
template <typename DeconvolutionLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo deconv_info = node.deconvolution_info();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;

    std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
        std::string(), mm,
        input, weights, biases, output, deconv_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);
    return func;
}

/** Create a backend layer depth-wise convolution function
 *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth-wise convolution layer function
 */
template <typename DepthwiseConvolutionLayer, typename TargetInfo>
std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::unique_ptr<IFunction> func;
    std::string                func_name;

    std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
        std::string("DepthwiseConvolutionLayer"),
        input, weights, biases, output, conv_info, depth_multiplier, fused_act);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Depth multiplier: " << depth_multiplier
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend depth to space layer function
 *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth to space layer function
 */
template <typename DepthToSpaceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DepthToSpaceLayerFunction>();
    func->configure(input, output, node.block_shape());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Block Size: " << node.block_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend dequantize layer function
 *
 * @tparam DequantizationLayerFunction Backend dequantize function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend dequantize layer function
 */
template <typename DequantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DequantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Input quantization info: " << input->info()->quantization_info()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
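
// For reference, asymmetric dequantization computes, per element and using the
// quantization info logged in the function above:
//
//     out = scale * (q - zero_point)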

/** Create a backend detection output layer function
 *
 * @tparam DetectionOutputLayerFunction Backend detection output function
 * @tparam TargetInfo                   Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection output layer function
 */
template <typename DetectionOutputLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DetectionOutputLayerInfo   detect_info = node.detection_output_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionOutputLayerFunction>();
    func->configure(input0, input1, input2, output, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " DetectionOutputLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend detection post process layer function
 *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection post process layer function
 */
template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType    *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType    *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType    *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType    *output0     = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType    *output1     = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType    *output2     = get_backing_tensor<TargetInfo>(node.output(2));
    typename TargetInfo::TensorType    *output3     = get_backing_tensor<TargetInfo>(node.output(3));
    const DetectionPostProcessLayerInfo detect_info = node.detection_post_process_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output0 == nullptr);
    ARM_COMPUTE_ERROR_ON(output1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output3 == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionPostProcessLayerFunction>();
    func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output0 shape: " << output0->info()->tensor_shape()
                               << " Output1 shape: " << output1->info()->tensor_shape()
                               << " Output2 shape: " << output2->info()->tensor_shape()
                               << " Output3 shape: " << output3->info()->tensor_shape()
                               << " DetectionPostProcessLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend element-wise operation layer function
 *
 * @tparam EltwiseFunctions Backend element-wise function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend element-wise operation layer function
 */
template <typename EltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input1         = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input2         = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output         = get_backing_tensor<TargetInfo>(node.output(0));
    const EltwiseOperation           eltwise_op     = node.eltwise_operation();
    const ConvertPolicy              convert_policy = node.convert_policy();
    const ActivationLayerInfo        act_info       = node.fused_activation();
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == EltwiseOperation::Add)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
            std::string("ArithmeticAddition"),
            input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Sub)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
            std::string("ArithmeticSubtraction"),
            input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Mul)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
            std::string("PixelWiseMultiplication"),
            input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
    }
    else if(eltwise_op == EltwiseOperation::Max)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
            std::string("ElementwiseMaximum"),
            input1, input2, output, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Div)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Division>(
            std::string("ArithmeticDivision"),
            input1, input2, output, act_info);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input1->info()->data_type()
                               << " Shape: " << input1->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
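
// Illustrative sketch (assumed naming): the EltwiseFunctions template argument
// groups the backend functions dispatched on above, e.g.:
//
//     struct MyEltwiseFunctions
//     {
//         using Addition       = MyArithmeticAddition;
//         using Subtraction    = MyArithmeticSubtraction;
//         using Multiplication = MyPixelWiseMultiplication;
//         using Maximum        = MyElementwiseMaximum;
//         using Division       = MyElementwiseDivision;
//     };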

/** Create a backend unary element-wise operation layer function
 *
 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend unary element-wise operation layer function
 */
template <typename UnaryEltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const UnaryEltwiseOperation      eltwise_op = node.eltwise_descriptor().op;

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == UnaryEltwiseOperation::Exp)
    {
        std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
            std::string("Exp"),
            input, output);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend flatten layer function
 *
 * @tparam FlattenLayerFunction Backend flatten function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend flatten layer function
 */
template <typename FlattenLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<FlattenLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend fully connected layer function
 *
 * @tparam FullyConnectedLayerFunction Backend fully-connected function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fully connected layer function
 */
template <typename FullyConnectedLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    const FullyConnectedLayerInfo    fc_info = node.info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(weights == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto wm   = get_weights_manager(ctx, TargetInfo::TargetType);
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
    func->configure(input, weights, biases, output, fc_info);

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << qss.str()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
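
// The two managers fetched above are why this helper takes a GraphContext:
// the memory manager lets the function share intra-layer scratch memory with
// the rest of the execution context, and the weights manager lets it reuse
// already-transformed weights across layers. Both services are optional; a
// minimal standalone instantiation without them could look like this
// (a sketch, assuming the backend function accepts null managers):
//
//   auto fc = std::make_unique<FullyConnectedLayerFunction>(nullptr, nullptr);
//   fc->configure(input, weights, biases, output, FullyConnectedLayerInfo());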

/** Create a backend generate proposals layer function
 *
 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
 * @tparam TargetInfo                     Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend generate proposals layer function
 */
template <typename GenerateProposalsLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *scores              = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas              = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *anchors             = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *proposals           = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType *scores_out          = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
    const GenerateProposalsInfo      info                = node.info();

    ARM_COMPUTE_ERROR_ON(scores == nullptr);
    ARM_COMPUTE_ERROR_ON(deltas == nullptr);
    ARM_COMPUTE_ERROR_ON(anchors == nullptr);
    ARM_COMPUTE_ERROR_ON(proposals == nullptr);
    ARM_COMPUTE_ERROR_ON(scores_out == nullptr);
    ARM_COMPUTE_ERROR_ON(num_valid_proposals == nullptr);

    // Create and configure function
    auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << scores->info()->data_type()
                               << " Scores shape: " << scores->info()->tensor_shape()
                               << " Deltas shape: " << deltas->info()->tensor_shape()
                               << " Anchors shape: " << anchors->info()->tensor_shape()
                               << " Proposals shape: " << proposals->info()->tensor_shape()
                               << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
                               << " Scores Out shape: " << scores_out->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend L2 normalization layer function
 *
 * @tparam L2NormalizeLayerFunction Backend L2 normalization function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend L2 normalization layer function
 */
template <typename L2NormalizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    int                              axis    = node.axis();
    float                            epsilon = node.epsilon();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
    func->configure(input, output, axis, epsilon);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Axis: " << axis
                               << " Epsilon: " << epsilon
                               << std::endl);

    return std::move(func);
}
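
// What the configured function computes, roughly: along the chosen axis,
//
//   out = in / sqrt(max(sum(in^2), epsilon))
//
// so epsilon acts as a lower bound guarding the division. For an NHWC
// tensor, axis 0 (the innermost dimension) normalizes each pixel's channel
// vector.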

/** Create a backend normalization layer function
 *
 * @tparam NormalizationLayerFunction Backend normalization function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend normalization layer function
 */
template <typename NormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
{
    ARM_COMPUTE_UNUSED(ctx);

    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const NormalizationLayerInfo     norm_info = node.normalization_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizationLayerFunction>();
    func->configure(input, output, norm_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Normalization info: " << norm_info.type()
                               << std::endl);

    return std::move(func);
}

/** Create a backend normalize planar YUV layer function
 *
 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend normalize planar YUV layer function
 */
template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *std    = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(mean == nullptr);
    ARM_COMPUTE_ERROR_ON(std == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
    func->configure(input, output, mean, std);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend pad layer function
 *
 * @tparam PadLayerFunction Backend pad function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pad layer function
 */
template <typename PadLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PaddingList               &padding   = node.padding();
    const PixelValue                 pad_value = node.pad_value();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PadLayerFunction>();
    func->configure(input, output, padding, pad_value);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
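
// A PaddingList carries one (before, after) element pair per tensor
// dimension; dimensions without an entry are left unpadded. An illustrative
// value that pads one pixel on each spatial border of an NCHW-laid-out
// tensor (example values, not defaults):
//
//   const PaddingList padding = { { 1, 1 } /* W */, { 1, 1 } /* H */ };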

/** Create a backend permute layer function
 *
 * @tparam PermuteLayerFunction Backend permute function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend permute layer function
 */
template <typename PermuteLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const PermutationVector         &perm   = node.permutation_vector();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PermuteLayerFunction>();
    func->configure(input, output, perm);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Permutation vector: " << perm
                               << std::endl);

    return std::move(func);
}
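
// The permutation vector is read as: output dimension i takes its size from
// input dimension perm[i]. With the library's dimension ordering this yields
// the usual layout conversions (vectors as used elsewhere in the library):
//
//   PermutationVector(2U, 0U, 1U); // NCHW -> NHWC
//   PermutationVector(1U, 2U, 0U); // NHWC -> NCHW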

/** Create a backend pooling layer function
 *
 * @tparam PoolingLayerFunction Backend pooling function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pooling layer function
 */
template <typename PoolingLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PoolingLayerInfo           pool_info = node.pooling_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PoolingLayerFunction>();
    func->configure(input, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Pooling info: " << pool_info.pool_type
                               << std::endl);

    return std::move(func);
}
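
// An illustrative PoolingLayerInfo for the call above: 3x3 max pooling with
// stride 2 and one pixel of padding on an NHWC tensor, assuming the
// Size2D-based constructor (example values only):
//
//   PoolingLayerInfo pool_info(PoolingType::MAX, Size2D(3, 3), DataLayout::NHWC,
//                              PadStrideInfo(2, 2, 1, 1));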

/** Create a backend PRelu layer function
 *
 * @tparam PReluFunction Backend PRelu function
 * @tparam TargetInfo    Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend PRelu layer function
 */
template <typename PReluFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *alpha  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PReluFunction>();
    func->configure(input, alpha, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend print layer function
 *
 * @tparam TargetInfo Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend print layer function
 */
template <typename TargetInfo>
std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_UNUSED(input);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << std::endl);

    return nullptr;
}
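
// Note on the helper above: input is only read by ARM_COMPUTE_ERROR_ON and
// the logging macro, both of which can compile to nothing depending on build
// options, so ARM_COMPUTE_UNUSED keeps those configurations warning-free.
// Returning nullptr is deliberate: no backend function object needs to be
// instantiated for a print node.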

/** Create a backend priorbox layer function
 *
 * @tparam PriorBoxLayerFunction Backend priorbox function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend priorbox layer function
 */
template <typename PriorBoxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1     = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const PriorBoxLayerInfo          prior_info = node.priorbox_info();
    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PriorBoxLayerFunction>();
    func->configure(input0, input1, output, prior_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " PriorBoxLayer info: " << prior_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend quantization layer function
 *
 * @tparam QuantizationLayerFunction Backend quantization function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend quantization layer function
 */
template <typename QuantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<QuantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend reduction operation layer function
 *
 * @tparam ReductionOperationFunction Backend reduction operation function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend reduction operation layer function
 */
template <typename ReductionOperationFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    ReductionOperation               op        = node.op();
    int                              axis      = node.axis();
    bool                             keep_dims = node.keep_dims();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, axis, op, keep_dims);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Operation: " << op
                               << " Axis: " << axis
                               << " Keep dimensions: " << keep_dims
                               << std::endl);

    return std::move(func);
}
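
// Shape effect of keep_dims, with illustrative values: reducing a (2, 3, 4)
// tensor over axis = 1 with ReductionOperation::SUM yields
//
//   keep_dims == true  -> shape (2, 1, 4)
//   keep_dims == false -> shape (2, 4)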

/** Create a backend reorg layer function
 *
 * @tparam ReorgLayerFunction Backend reorg function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reorg layer function
 */
template <typename ReorgLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReorgLayerFunction>();
    func->configure(input, output, node.stride());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend reshape layer function
 *
 * @tparam ReshapeLayerFunction Backend reshape function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reshape layer function
 */
template <typename ReshapeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReshapeLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend resize layer function
 *
 * @tparam ResizeLayerFunction Backend resize function
 * @tparam TargetInfo          Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend resize layer function
 */
template <typename ResizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    const InterpolationPolicy policy = node.policy();

    // Create and configure function
    auto func = std::make_unique<ResizeLayerFunction>();
    func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT, PixelValue(), SamplingPolicy::CENTER, false, false });

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Interpolation: " << policy
                               << std::endl);

    return std::move(func);
}
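
// Of the ScaleKernelInfo above, only the interpolation policy comes from the
// node; the rest is pinned by this helper: constant border with a default
// pixel value, centered sampling, and the two trailing flags (padding use
// and aligned corners, assuming the field order shown) disabled. A graph
// that needs different sampling behaviour cannot express it through
// ResizeLayerNode as it stands.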

/** Create a backend ROI align layer function
 *
 * @tparam ROIAlignLayerFunction ROI Align function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return ROI Align layer function
 */
template <typename ROIAlignLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *rois   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    ARM_COMPUTE_ERROR_ON(rois == nullptr);

    const ROIPoolingLayerInfo pool_info = node.pooling_info();

    // Create and configure function
    auto func = std::make_unique<ROIAlignLayerFunction>();

    func->configure(input, rois, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " ROIs shape: " << rois->info()->tensor_shape()
                               << " ROIPooling width: " << pool_info.pooled_width()
                               << " ROIPooling height: " << pool_info.pooled_height()
                               << std::endl);

    return std::move(func);
}

/** Create a backend slice layer function
 *
 * @tparam SliceLayerFunction Backend slice function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend slice layer function
 */
template <typename SliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SliceLayerFunction>();
    func->configure(input, output, node.starts(), node.ends());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend softmax layer function
 *
 * @tparam SoftmaxLayerFunction Backend softmax function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend softmax layer function
 */
template <typename SoftmaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      beta   = node.beta();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, beta);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend stack layer function
 *
 * @tparam StackLayerFunction Backend stack function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend stack layer function
 */
template <typename StackLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID: " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Extract IO and info
    std::vector<typename TargetInfo::TensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const int                        axis   = node.axis();

    // Create and configure function
    auto func = std::make_unique<StackLayerFunction>();
    func->configure(inputs, axis, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Inputs shape: " << inputs[0]->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << axis
                               << std::endl);

    return std::move(func);
}
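
// Shape sketch, following the usual stack semantics (illustrative values):
// stacking N inputs of shape (W, H) inserts a new dimension of size N at
// `axis`, so axis = 0 gives (N, W, H) and axis = 2 gives (W, H, N). All
// inputs are expected to share one shape and data type.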

/** Create a backend strided slice layer function
 *
 * @tparam StridedSliceLayerFunction Backend strided slice function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend strided slice layer function
 */
template <typename StridedSliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    Coordinates                      starts  = node.starts();
    Coordinates                      ends    = node.ends();
    BiStrides                        strides = node.strides();
    StridedSliceLayerInfo            info    = node.strided_slice_info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<StridedSliceLayerFunction>();
    func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
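
// Mask semantics for the configure call above, per the TensorFlow
// strided-slice convention the library follows: if bit i of begin_mask or
// end_mask is set, starts[i] or ends[i] is ignored and the fullest possible
// range of dimension i is used instead, while a set bit in shrink_axis_mask
// removes dimension i from the output shape entirely.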
} // namespace detail
} // namespace backends
} // namespace graph
} // namespace arm_compute

#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */