/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H

#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Tensor.h"
#include "arm_compute/graph/TypePrinter.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensorInfo.h"
#include "support/Cast.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
namespace detail
{
/** Returns backing tensor of a given tensor
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] tensor Tensor to extract the backing tensor from
 *
 * @return Backing tensor if present else nullptr
 */
template <typename TargetInfo>
typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
{
    typename TargetInfo::TensorType *backing_tensor = nullptr;
    if(tensor != nullptr)
    {
        ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
        // Get backing tensor handle
        ITensorHandle *tensor_handle = tensor->handle();
        // Get backing tensor
        backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
    }

    return backing_tensor;
}

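/* Note: every helper below is parameterised on a TargetInfo trait struct that a
 * backend supplies to describe itself. A minimal sketch of the expected shape,
 * loosely modelled on the CL backend (illustrative, not the definitive
 * definition; SrcTensorType is also assumed by create_concatenate_layer):
 *
 *     struct CLTargetInfo
 *     {
 *         using TensorType    = arm_compute::ICLTensor; // concrete backing tensor type
 *         using SrcTensorType = arm_compute::ICLTensor; // tensor type accepted as input
 *         static Target TargetType;                     // set to Target::CL by the backend
 *     };
 *
 *     ICLTensor *t = get_backing_tensor<CLTargetInfo>(node.input(0));
 */
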
74template <typename TargetInfo>
75void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
76{
77 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
Pablo Tello32521432018-11-15 14:43:10 +000078 << " Target: " << TargetInfo::TargetType
79 << " ID: " << node.id()
80 << node.name()
Georgios Pinitasda2491f2018-06-01 17:49:09 +010081 << std::endl);
82
83 ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
84 ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
85 ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
Michalis Spyrou6bff1952019-10-02 17:22:11 +010086 ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
Georgios Pinitasda2491f2018-06-01 17:49:09 +010087}

/** Creates a backend activation layer function
 *
 * @tparam ActivationLayerFunction Backend activation function
 * @tparam TargetInfo              Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend activation layer function
 */
template <typename ActivationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input    = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output   = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo        act_info = node.activation_info();

    // Create function
    auto func = std::make_unique<ActivationLayerFunction>();
    func->configure(input, output, act_info);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " a: " << act_info.a()
                               << " b: " << act_info.b()
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}

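/* For context: these create_* helpers are meant to be called from a backend's
 * function factory, which switches on the node type and supplies the concrete
 * function and target-info types. A minimal sketch of such a dispatch, with
 * assumed CL type names (the exact factory layout may differ):
 *
 *     std::unique_ptr<IFunction> create_function(INode &node, GraphContext &ctx)
 *     {
 *         switch(node.type())
 *         {
 *             case NodeType::ActivationLayer:
 *                 return create_activation_layer<CLActivationLayer, CLTargetInfo>(
 *                     *arm_compute::utils::cast::polymorphic_downcast<ActivationLayerNode *>(&node));
 *             default:
 *                 return nullptr;
 *         }
 *     }
 */
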
/** Creates a backend argminmax layer function
 *
 * @tparam ArgMinMaxLayerFunction Backend argminmax function
 * @tparam TargetInfo             Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend argminmax layer function
 */
template <typename ArgMinMaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ReductionOperation         op     = node.reduction_operation();
    unsigned int                     axis   = node.axis();

    // Create function
    auto func = std::make_unique<ArgMinMaxLayerFunction>();
    func->configure(input, axis, output, op);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Reduction Operation: " << op
                               << " axis: " << axis
                               << std::endl);

    return std::move(func);
}

/** Create a backend batch normalization layer function
 *
 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend batch normalization layer function
 */
template <typename BatchNormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *var   = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *beta  = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));

    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      epsilon   = node.epsilon();
    const ActivationLayerInfo        fused_act = node.fused_activation();

    // Create and configure function
    auto func = std::make_unique<BatchNormalizationLayerFunction>();
    func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Epsilon: " << epsilon << " "
                               << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}

/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend fused depthwise convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused depthwise convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();
    const float               epsilon          = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend bounding box transform layer function
 *
 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend bounding box transform layer function
 */
template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas    = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const BoundingBoxTransformInfo   bbox_info = node.info();

    // Create and configure function
    auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
    func->configure(input, output, deltas, bbox_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " BoundingBox Info img W: " << bbox_info.img_width() << " "
                               << " BoundingBox Info img H: " << bbox_info.img_height() << " "
                               << std::endl);

    return std::move(func);
}

/** Create a backend channel shuffle layer function
 *
 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend channel shuffle layer function
 */
template <typename ChannelShuffleLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const unsigned int               num_groups = node.num_groups();

    // Create function
    auto func = std::make_unique<ChannelShuffleLayerFunction>();
    func->configure(input, output, num_groups);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Num groups: " << num_groups
                               << std::endl);

    return std::move(func);
}

/** Create a backend layer concatenate function
 *
 * @tparam ConcatenateLayerFunction Backend concatenate function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend concatenate layer function
 */
template <typename ConcatenateLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID: " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Return nullptr if depth concatenate is switched off
    if(!node.is_enabled())
    {
        return nullptr;
    }

    // Extract IO and info
    std::vector<typename TargetInfo::SrcTensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DataLayout                 data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
    const size_t                     concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());

    // Create and configure function
    auto func = std::make_unique<ConcatenateLayerFunction>();
    func->configure(inputs, output, concat_axis);

    // Log info
    const bool         is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << concat_axis
                               << qss.str()
                               << std::endl);

    return std::move(func);
}

/** Create a backend convolution layer function
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info      = node.convolution_info();
    const unsigned int        num_groups     = node.num_groups();
    const ConvolutionMethod   conv_algorithm = node.convolution_method();
    const bool                fast_math      = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act      = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    if(conv_algorithm == ConvolutionMethod::Winograd)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
            std::string("WinogradConvolutionLayer"), mm,
            input, weights, biases, output, conv_info, fused_act, fast_math);
    }
    else if(conv_algorithm == ConvolutionMethod::Direct)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
            std::string("DirectConvolutionLayer"),
            input, weights, biases, output, conv_info, fused_act);
    }
    else if(conv_algorithm == ConvolutionMethod::GEMM)
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
            std::string("GEMMConvolutionLayer"), mm,
            input, weights, biases, output, conv_info,
            WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
    }
    else
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
            std::string("GenericConvolutionLayer"), mm,
            input, weights, biases, output, conv_info,
            WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
    }

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

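/* The ConvolutionLayerFunctions template parameter above is expected to be a
 * type bundle naming one backend function per convolution method. A sketch of
 * such a bundle, assuming the CL runtime function names (illustrative only):
 *
 *     struct CLConvolutionLayerFunctions
 *     {
 *         using GenericConvolutionLayer  = CLConvolutionLayer;
 *         using GEMMConvolutionLayer     = CLGEMMConvolutionLayer;
 *         using DirectConvolutionLayer   = CLDirectConvolutionLayer;
 *         using WinogradConvolutionLayer = CLWinogradConvolutionLayer;
 *     };
 */
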
/** Create a backend convolution layer function with post operator
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_with_post_op(FusedConvolutionWithPostOpNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 4 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const ActivationLayerInfo fused_act  = node.fused_activation();

    experimental::PostOpList<typename TargetInfo::TensorType *> post_ops;

    auto &post_op_info_list = node.post_op_info_list();
    for(const auto &post_op_info : post_op_info_list)
    {
        switch(post_op_info->type())
        {
            case PostOpType::Activation:
            {
                const auto act_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoActivation *>(post_op_info.get());
                post_ops.template push_back_op<experimental::PostOpAct<typename TargetInfo::TensorType *>>(act_info->_act);
                break;
            }
            case PostOpType::Eltwise_Add:
            {
                typename TargetInfo::TensorType *add_input    = get_backing_tensor<TargetInfo>(node.input(3));
                const auto                       eltwise_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoEltwiseAdd *>(post_op_info.get());
                post_ops.template push_back_op<experimental::PostOpEltwiseAdd<typename TargetInfo::TensorType *>>(add_input, eltwise_info->_prev_op_dst_pos, eltwise_info->_policy);
                break;
            }
            default:
            {
                ARM_COMPUTE_ERROR("Unsupported PostOpType");
            }
        }
    }

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    // Fusing convolution with post ops is only supported for 1x1 convolutions, which are implemented only as GEMM-based convolution (gemmconv2d)
    std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
        std::string("GEMMConvolutionLayer"), mm,
        input, weights, biases, output, conv_info,
        WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups, post_ops);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend deconvolution layer function
 *
 * @tparam DeconvolutionLayerFunction Backend deconvolution function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend deconvolution layer function
 */
template <typename DeconvolutionLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo deconv_info = node.deconvolution_info();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;

    std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
        std::string(), mm,
        input, weights, biases, output, deconv_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);
    return func;
}

/** Create a backend layer depth-wise convolution function
 *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth-wise convolution layer function
 */
template <typename DepthwiseConvolutionLayer, typename TargetInfo>
std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::unique_ptr<IFunction> func;
    std::string                func_name;

    std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
        std::string("DepthwiseConvolutionLayer"),
        input, weights, biases, output, conv_info, depth_multiplier, fused_act);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Depth multiplier: " << depth_multiplier
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend depth to space layer function
 *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth to space layer function
 */
template <typename DepthToSpaceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DepthToSpaceLayerFunction>();
    func->configure(input, output, node.block_shape());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Block Size: " << node.block_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend dequantize layer function
 *
 * @tparam DequantizationLayerFunction Backend dequantize function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend dequantize layer function
 */
template <typename DequantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DequantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Input quantization info: " << input->info()->quantization_info()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
/** Create a backend detection output layer function
 *
 * @tparam DetectionOutputLayerFunction Backend detection output function
 * @tparam TargetInfo                   Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection output layer function
 */
template <typename DetectionOutputLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DetectionOutputLayerInfo   detect_info = node.detection_output_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionOutputLayerFunction>();
    func->configure(input0, input1, input2, output, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " DetectionOutputLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend detection post process layer function
 *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection post process layer function
 */
template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType     *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType     *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType     *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType     *output0     = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType     *output1     = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType     *output2     = get_backing_tensor<TargetInfo>(node.output(2));
    typename TargetInfo::TensorType     *output3     = get_backing_tensor<TargetInfo>(node.output(3));
    const DetectionPostProcessLayerInfo detect_info = node.detection_post_process_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output0 == nullptr);
    ARM_COMPUTE_ERROR_ON(output1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output3 == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionPostProcessLayerFunction>();
    func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output0 shape: " << output0->info()->tensor_shape()
                               << " Output1 shape: " << output1->info()->tensor_shape()
                               << " Output2 shape: " << output2->info()->tensor_shape()
                               << " Output3 shape: " << output3->info()->tensor_shape()
                               << " DetectionPostProcessLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend element-wise operation layer function
 *
 * @tparam EltwiseFunctions Backend element-wise function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend element-wise operation layer function
 */
template <typename EltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input1         = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input2         = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output         = get_backing_tensor<TargetInfo>(node.output(0));
    const EltwiseOperation           eltwise_op     = node.eltwise_operation();
    const ConvertPolicy              convert_policy = node.convert_policy();
    const ActivationLayerInfo        act_info       = node.fused_activation();
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == EltwiseOperation::Add)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
            std::string("ArithmeticAddition"),
            input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Sub)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
            std::string("ArithmeticSubtraction"),
            input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Mul)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
            std::string("PixelWiseMultiplication"),
            input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
    }
    else if(eltwise_op == EltwiseOperation::Max)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
            std::string("ElementwiseMaximum"),
            input1, input2, output, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Div)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Division>(
            std::string("ArithmeticDivision"),
            input1, input2, output, act_info);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input1->info()->data_type()
                               << " Shape: " << input1->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

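/* Similarly, the EltwiseFunctions bundle maps each supported operation to a
 * backend function type. A sketch under the same CL-name assumptions as above
 * (illustrative, not the definitive definition):
 *
 *     struct CLEltwiseFunctions
 *     {
 *         using Addition       = CLArithmeticAddition;
 *         using Subtraction    = CLArithmeticSubtraction;
 *         using Multiplication = CLPixelWiseMultiplication;
 *         using Maximum        = CLElementwiseMax;
 *         using Division       = CLArithmeticDivision;
 *     };
 */
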
/** Create a backend unary element-wise operation layer function
 *
 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend unary element-wise operation layer function
 */
template <typename UnaryEltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const UnaryEltwiseOperation      eltwise_op = node.eltwise_descriptor().op;

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == UnaryEltwiseOperation::Exp)
    {
        std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
            std::string("Exp"),
            input, output);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend flatten layer function
 *
 * @tparam FlattenLayerFunction Backend flatten function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend flatten layer function
 */
template <typename FlattenLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<FlattenLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

1079/** Create a backend fully connected layer function
1080 *
1081 * @tparam FullyConnectedLayerFunction Backend fully-connected function
1082 * @tparam TargetInfo Target-specific information
1083 *
1084 * @param[in] node Node to create the backend function for
1085 * @param[in] ctx Graph context
1086 *
1087 * @return Backend fully connected layer function
1088 */
1089template <typename FullyConnectedLayerFunction, typename TargetInfo>
1090std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
1091{
1092 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1093
1094 // Extract IO and info
1095 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1096 typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
1097 typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
1098 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
cfRodf2c022e2021-11-05 11:29:53 +00001099 FullyConnectedLayerInfo fc_info = node.info();
1100 fc_info.enable_fast_math = (node.fast_math_hint() == FastMathHint::Enabled);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001101
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001102 ARM_COMPUTE_ERROR_ON(input == nullptr);
1103 ARM_COMPUTE_ERROR_ON(weights == nullptr);
1104 ARM_COMPUTE_ERROR_ON(output == nullptr);
1105
Georgios Pinitase2220552018-07-20 13:23:44 +01001106 // Create and configure function
Michalis Spyrou1a569a32019-09-10 17:20:34 +01001107 auto wm = get_weights_manager(ctx, TargetInfo::TargetType);
1108 auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001109 auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
Georgios Pinitase2220552018-07-20 13:23:44 +01001110 func->configure(input, weights, biases, output, fc_info);
1111
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001112 const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
1113
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001114 // Log info
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001115 std::ostringstream qss;
1116 if(is_quantized)
1117 {
1118 qss << " Input QuantInfo: " << input->info()->quantization_info()
1119 << " Weights QuantInfo: " << weights->info()->quantization_info()
1120 << " Output QuantInfo: " << output->info()->quantization_info();
1121 }
Pablo Tello32521432018-11-15 14:43:10 +00001122 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1123 << node.name()
1124 << " Type: " << node.type()
1125 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001126 << " Data Type: " << input->info()->data_type()
Georgios Pinitasfd7e8532018-09-07 10:51:27 +01001127 << qss.str()
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001128 << " Input shape: " << input->info()->tensor_shape()
1129 << " Weights shape: " << weights->info()->tensor_shape()
1130 << " Output shape: " << output->info()->tensor_shape()
1131 << std::endl);
1132
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001133 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001134}
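
// The fast math hint consumed above is set upstream of the graph backends; a
// minimal sketch, assuming the frontend Stream API, of enabling it for a whole
// stream so that fully connected (and convolution) nodes inherit it:
//
//   arm_compute::graph::frontend::Stream graph(0, "fc_fast_math");
//   graph << Target::NEON << FastMathHint::Enabled; // subsequent layers pick up the hint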
1135
Manuel Bottini5209be52019-02-13 16:34:56 +00001136/** Create a backend generate proposals layer function
1137 *
1138 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
1139 * @tparam TargetInfo Target-specific information
1140 *
1141 * @param[in] node Node to create the backend function for
1142 * @param[in] ctx Graph context
1143 *
1144 * @return Backend generate proposals layer function
1145 */
1146template <typename GenerateProposalsLayerFunction, typename TargetInfo>
1147std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
1148{
1149 validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);
1150
1151 // Extract IO and info
1152 typename TargetInfo::TensorType *scores = get_backing_tensor<TargetInfo>(node.input(0));
1153 typename TargetInfo::TensorType *deltas = get_backing_tensor<TargetInfo>(node.input(1));
1154 typename TargetInfo::TensorType *anchors = get_backing_tensor<TargetInfo>(node.input(2));
1155 typename TargetInfo::TensorType *proposals = get_backing_tensor<TargetInfo>(node.output(0));
1156 typename TargetInfo::TensorType *scores_out = get_backing_tensor<TargetInfo>(node.output(1));
1157 typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
1158 const GenerateProposalsInfo info = node.info();
1159
1160 ARM_COMPUTE_ERROR_ON(scores == nullptr);
1161 ARM_COMPUTE_ERROR_ON(deltas == nullptr);
1162 ARM_COMPUTE_ERROR_ON(anchors == nullptr);
1163 ARM_COMPUTE_ERROR_ON(proposals == nullptr);
1164 ARM_COMPUTE_ERROR_ON(scores_out == nullptr);
     ARM_COMPUTE_ERROR_ON(num_valid_proposals == nullptr);
1165
1166 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001167 auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
Manuel Bottini5209be52019-02-13 16:34:56 +00001168 func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);
1169
1170 // Log info
1171 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
1172                                                << " Target: " << TargetInfo::TargetType
1173 << " Data Type: " << scores->info()->data_type()
1174 << " Scores shape: " << scores->info()->tensor_shape()
1175 << " Deltas shape: " << deltas->info()->tensor_shape()
1176 << " Anchors shape: " << anchors->info()->tensor_shape()
1177 << " Proposals shape: " << proposals->info()->tensor_shape()
1178 << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
1179 << " Scores Out shape: " << scores_out->info()->tensor_shape()
1180 << std::endl);
1181
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001182 return std::move(func);
Manuel Bottini5209be52019-02-13 16:34:56 +00001183}
1184
thecha013603aff2020-09-01 14:52:38 +01001185/** Create a backend L2 normalization layer function
1186 *
1187 * @tparam L2NormalizeLayerFunction Backend L2 normalization function
1188 * @tparam TargetInfo Target-specific information
1189 *
1190 * @param[in] node Node to create the backend function for
1191 * @param[in] ctx Graph context
1192 *
1193 * @return Backend L2 normalization layer function
1194 */
1195template <typename L2NormalizeLayerFunction, typename TargetInfo>
1196std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
1197{
1198 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1199
1200 // Extract IO and info
1201 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1202 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1203 int axis = node.axis();
1204 float epsilon = node.epsilon();
1205
1206 ARM_COMPUTE_ERROR_ON(input == nullptr);
1207 ARM_COMPUTE_ERROR_ON(output == nullptr);
1208
1209 // Create and configure function
1210 auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001211 auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
thecha013603aff2020-09-01 14:52:38 +01001212 func->configure(input, output, axis, epsilon);
1213
1214 // Log info
1215 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1216 << node.name()
1217 << " Type: " << node.type()
1218 << " Target: " << TargetInfo::TargetType
1219 << " Data Type: " << input->info()->data_type()
1220 << " Input shape: " << input->info()->tensor_shape()
1221 << " Output shape: " << output->info()->tensor_shape()
1222 << " Axis: " << axis
1223 << " Epsilon: " << epsilon
1224 << std::endl);
1225
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001226 return std::move(func);
thecha013603aff2020-09-01 14:52:38 +01001227}
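
// For reference, the configured function is expected to perform the usual
// L2 normalization along the chosen axis, with epsilon acting as a lower
// bound on the squared norm:
//
//   out[i] = in[i] / sqrt(max(sum_j(in[j]^2), epsilon))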
1228
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001229/** Create a backend normalization layer function
1230 *
1231 * @tparam NormalizationLayerFunction Backend normalization function
1232 * @tparam TargetInfo Target-specific information
1233 *
1234 * @param[in] node Node to create the backend function for
1235 * @param[in] ctx Graph context
1236 *
1237 * @return Backend normalization layer function
1238 */
1239template <typename NormalizationLayerFunction, typename TargetInfo>
1240std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
1241{
1242 ARM_COMPUTE_UNUSED(ctx);
1243
1244 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1245
1246 // Extract IO and info
1247 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1248 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1249 const NormalizationLayerInfo norm_info = node.normalization_info();
1250 ARM_COMPUTE_ERROR_ON(input == nullptr);
1251 ARM_COMPUTE_ERROR_ON(output == nullptr);
1252
1253 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001254 auto func = std::make_unique<NormalizationLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001255 func->configure(input, output, norm_info);
1256
1257 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001258 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1259 << node.name()
1260 << " Type: " << node.type()
1261 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001262 << " Data Type: " << input->info()->data_type()
1263 << " Input shape: " << input->info()->tensor_shape()
1264 << " Output shape: " << output->info()->tensor_shape()
1265 << " Normalization info: " << norm_info.type()
1266 << std::endl);
1267
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001268 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001269}
1270
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001271/** Create a backend normalize planar YUV layer function
1272 *
1273 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
1274 * @tparam TargetInfo Target-specific information
1275 *
1276 * @param[in] node Node to create the backend function for
1277 *
1278 * @return Backend normalize planar YUV layer function
1279 */
1280template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
1281std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
1282{
1283 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1284
1285 // Extract IO and info
1286 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1287 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
1288 typename TargetInfo::TensorType *std = get_backing_tensor<TargetInfo>(node.input(2));
1289 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1290 ARM_COMPUTE_ERROR_ON(input == nullptr);
1291 ARM_COMPUTE_ERROR_ON(mean == nullptr);
1292 ARM_COMPUTE_ERROR_ON(std == nullptr);
1293 ARM_COMPUTE_ERROR_ON(output == nullptr);
1294
1295 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001296 auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001297 func->configure(input, output, mean, std);
1298
1299 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001300 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1301 << node.name()
1302 << " Type: " << node.type()
1303 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001304 << " Data Type: " << input->info()->data_type()
1305 << " Shape: " << input->info()->tensor_shape()
1306 << std::endl);
1307
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001308 return std::move(func);
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001309}
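
// The per-channel operation is assumed to be the standard mean/std whitening
// used for planar YUV inputs:
//
//   out[c] = (in[c] - mean[c]) / std[c]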
1310
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001311/** Create a backend pad layer function
1312 *
1313 * @tparam PadLayerFunction Backend pad function
1314 * @tparam TargetInfo Target-specific information
1315 *
1316 * @param[in] node Node to create the backend function for
1317 *
1318 * @return Backend pad layer function
1319 */
1320template <typename PadLayerFunction, typename TargetInfo>
1321std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
1322{
1323 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1324
1325 // Extract IO and info
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001326 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1327 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1328 const PaddingList &padding = node.padding();
1329 const PixelValue pad_value = node.pad_value();
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001330 ARM_COMPUTE_ERROR_ON(input == nullptr);
1331 ARM_COMPUTE_ERROR_ON(output == nullptr);
1332
1333 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001334 auto func = std::make_unique<PadLayerFunction>();
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001335 func->configure(input, output, padding, pad_value);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001336
1337 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001338 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1339 << node.name()
1340 << " Type: " << node.type()
1341 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001342 << " Data Type: " << input->info()->data_type()
1343 << " Input shape: " << input->info()->tensor_shape()
1344 << " Output shape: " << output->info()->tensor_shape()
1345 << std::endl);
1346
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001347 return std::move(func);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001348}
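
// PaddingList is a per-dimension list of (before, after) element counts; a
// short sketch of how a node's padding could be expressed:
//
//   PaddingList padding = { { 1, 1 },   // dimension 0: 1 element before and after
//                           { 2, 0 } }; // dimension 1: 2 elements before, none after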
1349
Georgios Pinitas57c48242018-08-02 13:41:49 +01001350/** Create a backend permute layer function
1351 *
1352 * @tparam PermuteLayerFunction Backend permute function
1353 * @tparam TargetInfo Target-specific information
1354 *
1355 * @param[in] node Node to create the backend function for
1356 *
1357 * @return Backend permute layer function
1358 */
1359template <typename PermuteLayerFunction, typename TargetInfo>
1360std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
1361{
1362 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1363
1364 // Extract IO and info
1365 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1366 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1367 const PermutationVector &perm = node.permutation_vector();
1368 ARM_COMPUTE_ERROR_ON(input == nullptr);
1369 ARM_COMPUTE_ERROR_ON(output == nullptr);
1370
1371 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001372 auto func = std::make_unique<PermuteLayerFunction>();
Georgios Pinitas57c48242018-08-02 13:41:49 +01001373 func->configure(input, output, perm);
1374
1375 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001376 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1377 << node.name()
1378 << " Type: " << node.type()
1379 << " Target: " << TargetInfo::TargetType
Georgios Pinitas57c48242018-08-02 13:41:49 +01001380 << " Data Type: " << input->info()->data_type()
1381 << " Input shape: " << input->info()->tensor_shape()
1382 << " Output shape: " << output->info()->tensor_shape()
1383 << " Permutation vector: " << perm
1384 << std::endl);
1385
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001386 return std::move(func);
Georgios Pinitas57c48242018-08-02 13:41:49 +01001387}
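
// The permutation vector maps destination dimensions onto source dimensions,
// i.e. dst.dim[i] = src.dim[perm[i]]; for example (a sketch), permuting an
// NCHW-ordered tensor (dim0 = W, dim1 = H, dim2 = C) into NHWC order uses:
//
//   PermutationVector perm(2U, 0U, 1U); // dim0 <- C, dim1 <- W, dim2 <- H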
1388
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001389/** Create a backend pooling layer function
1390 *
1391 * @tparam PoolingLayerFunction Backend pooling function
1392 * @tparam TargetInfo Target-specific information
1393 *
1394 * @param[in] node Node to create the backend function for
1395 *
1396 * @return Backend pooling layer function
1397 */
1398template <typename PoolingLayerFunction, typename TargetInfo>
1399std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
1400{
1401 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1402
1403 // Extract IO and info
1404 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1405 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1406 const PoolingLayerInfo pool_info = node.pooling_info();
1407 ARM_COMPUTE_ERROR_ON(input == nullptr);
1408 ARM_COMPUTE_ERROR_ON(output == nullptr);
1409
1410 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001411 auto func = std::make_unique<PoolingLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001412 func->configure(input, output, pool_info);
1413
1414 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001415 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1416 << node.name()
1417 << " Type: " << node.type()
1418 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001419 << " Data Type: " << input->info()->data_type()
1420 << " Input shape: " << input->info()->tensor_shape()
1421 << " Output shape: " << output->info()->tensor_shape()
Sang-Hoon Park0cb3da62020-01-15 12:39:56 +00001422 << " Pooling info: " << pool_info.pool_type
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001423 << std::endl);
1424
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001425 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001426}
1427
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001428/** Create a backend PRelu layer function
1429 *
1430 * @tparam PReluFunction Backend PRelu function
1431 * @tparam TargetInfo Target-specific information
1432 *
1433 * @param[in] node Node to create the backend function for
1434 *
1435 * @return Backend PRelu layer function
1436 */
1437template <typename PReluFunction, typename TargetInfo>
1438std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
1439{
1440 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1441
1442 // Extract IO and info
1443 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1444 typename TargetInfo::TensorType *alpha = get_backing_tensor<TargetInfo>(node.input(1));
1445 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1446 ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
1447 ARM_COMPUTE_ERROR_ON(output == nullptr);
1448
1449 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001450 auto func = std::make_unique<PReluFunction>();
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001451 func->configure(input, alpha, output);
1452
1453 // Log info
1454 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1455 << node.name()
1456 << " Type: " << node.type()
1457 << " Target: " << TargetInfo::TargetType
1458 << " Data Type: " << input->info()->data_type()
1459 << " Input shape: " << input->info()->tensor_shape()
1460 << " Output shape: " << output->info()->tensor_shape()
1461 << std::endl);
1462
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001463 return std::move(func);
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001464}
1465
Giorgio Arena6e9d0e02020-01-03 15:02:04 +00001466/** Create a backend print layer function
1467 *
1468 * @tparam TargetInfo Target-specific information
1469 *
1470 * @param[in] node Node to create the backend function for
1471 *
1472 * @return Backend print layer function (always nullptr; no backend function is instantiated for a print node)
1473 */
1474template <typename TargetInfo>
1475std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
1476{
1477 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1478
1479 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1480 ARM_COMPUTE_ERROR_ON(input == nullptr);
1481 ARM_COMPUTE_UNUSED(input);
1482
1483 // Log info
1484 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1485 << node.name()
1486 << " Type: " << node.type()
1487 << " Target: " << TargetInfo::TargetType
1488 << " Data Type: " << input->info()->data_type()
1489 << " Input shape: " << input->info()->tensor_shape()
1490 << std::endl);
1491
1492 return nullptr;
1493}
1494
Pablo Tello32521432018-11-15 14:43:10 +00001495/** Create a backend priorbox layer function
1496 *
1497 * @tparam PriorBoxLayerFunction Backend priorbox function
1498 * @tparam TargetInfo Target-specific information
1499 *
1500 * @param[in] node Node to create the backend function for
1501 *
1502 * @return Backend priorbox layer function
1503 */
1504template <typename PriorBoxLayerFunction, typename TargetInfo>
1505std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
1506{
1507 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1508
1509 // Extract IO and info
1510 typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
1511 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
1512 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1513 const PriorBoxLayerInfo prior_info = node.priorbox_info();
1514 ARM_COMPUTE_ERROR_ON(input0 == nullptr);
1515 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
1516 ARM_COMPUTE_ERROR_ON(output == nullptr);
1517
1518 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001519 auto func = std::make_unique<PriorBoxLayerFunction>();
Pablo Tello32521432018-11-15 14:43:10 +00001520 func->configure(input0, input1, output, prior_info);
1521
1522 // Log info
1523 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1524 << node.name()
1525 << " Type: " << node.type()
1526 << " Target: " << TargetInfo::TargetType
1527 << " Data Type: " << input0->info()->data_type()
1528 << " Input0 shape: " << input0->info()->tensor_shape()
1529 << " Input1 shape: " << input1->info()->tensor_shape()
1530 << " Output shape: " << output->info()->tensor_shape()
1531 << " PriorBoxLayer info: " << prior_info
1532 << std::endl);
1533
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001534 return std::move(func);
Pablo Tello32521432018-11-15 14:43:10 +00001535}
1536
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001537/** Create a backend quantization layer function
1538 *
1539 * @tparam QuantizationLayerFunction Backend quantization function
1540 * @tparam TargetInfo Target-specific information
1541 *
1542 * @param[in] node Node to create the backend function for
1543 *
1544 * @return Backend quantization layer function
1545 */
1546template <typename QuantizationLayerFunction, typename TargetInfo>
1547std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
1548{
1549 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1550
1551 // Extract IO and info
1552 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1553 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1554 ARM_COMPUTE_ERROR_ON(input == nullptr);
1555 ARM_COMPUTE_ERROR_ON(output == nullptr);
1556
1557 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001558 auto func = std::make_unique<QuantizationLayerFunction>();
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001559 func->configure(input, output);
1560
1561 // Log info
1562 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1563 << node.name()
1564 << " Type: " << node.type()
1565 << " Target: " << TargetInfo::TargetType
1566 << " Data Type: " << input->info()->data_type()
1567 << " Input shape: " << input->info()->tensor_shape()
1568 << " Output shape: " << output->info()->tensor_shape()
1569 << std::endl);
1570
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001571 return std::move(func);
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001572}
1573
thecha01d64444b2020-09-07 14:50:21 +01001574/** Create a backend reduction operation layer function
1575 *
1576 * @tparam ReductionOperationFunction Backend reduction operation function
1577 * @tparam TargetInfo Target-specific information
1578 *
1579 * @param[in] node Node to create the backend function for
1580 * @param[in] ctx Graph context
1581 *
1582 * @return Backend reduction operation layer function
1583 */
1584template <typename ReductionOperationFunction, typename TargetInfo>
1585std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
1586{
1587 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1588
1589 // Extract IO and info
1590 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1591 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1592 ReductionOperation op = node.op();
1593 int axis = node.axis();
1594 bool keep_dims = node.keep_dims();
1595 ARM_COMPUTE_ERROR_ON(input == nullptr);
1596 ARM_COMPUTE_ERROR_ON(output == nullptr);
1597
1598 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001599 auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
thecha01d64444b2020-09-07 14:50:21 +01001600 func->configure(input, output, axis, op, keep_dims);
1601
1602 // Log info
1603 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1604 << node.name()
1605 << " Type: " << node.type()
1606 << " Target: " << TargetInfo::TargetType
1607 << " Data Type: " << input->info()->data_type()
1608 << " Input shape: " << input->info()->tensor_shape()
1609 << " Output shape: " << output->info()->tensor_shape()
1610 << " Operation: " << op
1611 << " Axis: " << axis
1612                                                << " Keep dimensions: " << keep_dims
1613 << std::endl);
1614
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001615 return std::move(func);
thecha01d64444b2020-09-07 14:50:21 +01001616}
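
// Shape semantics of keep_dims (a sketch): reducing a (8, 4, 16) input along
// axis 0 yields
//
//   keep_dims == true  -> (1, 4, 16)   // reduced dimension kept with size 1
//   keep_dims == false -> (4, 16)      // reduced dimension dropped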
1617
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001618/** Create a backend reorg layer function
1619 *
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001620 * @tparam ReorgLayerFunction Backend reorg function
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001621 * @tparam TargetInfo Target-specific information
1622 *
1623 * @param[in] node Node to create the backend function for
1624 *
1625 * @return Backend reorg layer function
1626 */
1627template <typename ReorgLayerFunction, typename TargetInfo>
1628std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
1629{
1630 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1631
1632 // Extract IO and info
1633 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1634 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1635 ARM_COMPUTE_ERROR_ON(input == nullptr);
1636 ARM_COMPUTE_ERROR_ON(output == nullptr);
1637
1638 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001639 auto func = std::make_unique<ReorgLayerFunction>();
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001640 func->configure(input, output, node.stride());
1641
1642 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001643 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1644 << node.name()
1645 << " Type: " << node.type()
1646 << " Target: " << TargetInfo::TargetType
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001647 << " Data Type: " << input->info()->data_type()
1648 << " Input shape: " << input->info()->tensor_shape()
1649 << " Output shape: " << output->info()->tensor_shape()
1650 << std::endl);
1651
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001652 return std::move(func);
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001653}
1654
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001655/** Create a backend reshape layer function
1656 *
1657 * @tparam ReshapeLayerFunction Backend reshape function
1658 * @tparam TargetInfo Target-specific information
1659 *
1660 * @param[in] node Node to create the backend function for
1661 *
1662 * @return Backend reshape layer function
1663 */
1664template <typename ReshapeLayerFunction, typename TargetInfo>
1665std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
1666{
1667 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1668
1669 // Extract IO and info
1670 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1671 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1672 ARM_COMPUTE_ERROR_ON(input == nullptr);
1673 ARM_COMPUTE_ERROR_ON(output == nullptr);
1674
1675 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001676 auto func = std::make_unique<ReshapeLayerFunction>();
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001677 func->configure(input, output);
1678
1679 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001680 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1681 << node.name()
1682 << " Type: " << node.type()
1683 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001684 << " Data Type: " << input->info()->data_type()
1685 << " Input shape: " << input->info()->tensor_shape()
1686 << " Output shape: " << output->info()->tensor_shape()
1687 << std::endl);
1688
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001689 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001690}
1691
1692/** Create a backend resize layer function
1693 *
1694 * @tparam ResizeLayerFunction Backend resize function
1695 * @tparam TargetInfo Target-specific information
1696 *
1697 * @param[in] node Node to create the backend function for
1698 *
1699 * @return Backend resize layer function
1700 */
1701template <typename ResizeLayerFunction, typename TargetInfo>
1702std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
1703{
1704 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1705
1706 // Extract IO and info
1707 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1708 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1709 ARM_COMPUTE_ERROR_ON(input == nullptr);
1710 ARM_COMPUTE_ERROR_ON(output == nullptr);
1711 const InterpolationPolicy policy = node.policy();
1712
1713 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001714 auto func = std::make_unique<ResizeLayerFunction>();
Georgios Pinitasc53266e2020-12-09 03:11:53 +00001715 func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT, PixelValue(), SamplingPolicy::CENTER, false, false });
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001716
1717 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001718 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1719 << node.name()
1720 << " Type: " << node.type()
1721 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001722 << " Data Type: " << input->info()->data_type()
1723 << " Input shape: " << input->info()->tensor_shape()
1724 << " Output shape: " << output->info()->tensor_shape()
1725 << " Interpolation: " << policy
1726 << std::endl);
1727
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001728 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001729}
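
// Note that only the interpolation policy comes from the node; the remaining
// scaling options are fixed by the ScaleKernelInfo above (CONSTANT border,
// CENTER sampling). InterpolationPolicy itself offers NEAREST_NEIGHBOR,
// BILINEAR and AREA.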
1730
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001731/** Create a backend ROI align layer function
1732 *
1733 * @tparam ROIAlignLayerFunction ROI Align function
1734 * @tparam TargetInfo Target-specific information
1735 *
1736 * @param[in] node Node to create the backend function for
1737 *
1738 * @return ROI Align layer function
1739 */
1740template <typename ROIAlignLayerFunction, typename TargetInfo>
1741std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
1742{
1743 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1744
1745 // Extract IO and info
1746 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1747 typename TargetInfo::TensorType *rois = get_backing_tensor<TargetInfo>(node.input(1));
1748 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1749 ARM_COMPUTE_ERROR_ON(input == nullptr);
1750 ARM_COMPUTE_ERROR_ON(output == nullptr);
1751 ARM_COMPUTE_ERROR_ON(rois == nullptr);
1752
1753 const ROIPoolingLayerInfo pool_info = node.pooling_info();
1754
1755 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001756 auto func = std::make_unique<ROIAlignLayerFunction>();
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001757
1758 func->configure(input, rois, output, pool_info);
1759
1760 // Log info
Isabella Gottardi0ae5de92019-03-14 10:32:11 +00001761 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1762 << node.name()
1763 << " Type: " << node.type()
1764 << " Target: " << TargetInfo::TargetType
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001765 << " Data Type: " << input->info()->data_type()
1766 << " Input shape: " << input->info()->tensor_shape()
1767 << " Output shape: " << output->info()->tensor_shape()
1768 << " ROIs shape: " << rois->info()->tensor_shape()
1769 << " ROIPooling width: " << pool_info.pooled_width()
1770 << " ROIPooling height: " << pool_info.pooled_height()
1771 << std::endl);
1772
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001773 return std::move(func);
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001774}
1775
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001776/** Create a backend slice layer function
1777 *
1778 * @tparam SliceLayerFunction Backend slice function
1779 * @tparam TargetInfo Target-specific information
1780 *
1781 * @param[in] node Node to create the backend function for
1782 *
1783 * @return Backend slice layer function
1784 */
1785template <typename SliceLayerFunction, typename TargetInfo>
1786std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
1787{
1788 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1789
1790 // Extract IO and info
1791 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1792 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1793 ARM_COMPUTE_ERROR_ON(input == nullptr);
1794 ARM_COMPUTE_ERROR_ON(output == nullptr);
1795
1796 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001797 auto func = std::make_unique<SliceLayerFunction>();
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001798 func->configure(input, output, node.starts(), node.ends());
1799
1800 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001801 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1802 << node.name()
1803 << " Type: " << node.type()
1804 << " Target: " << TargetInfo::TargetType
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001805 << " Data Type: " << input->info()->data_type()
1806 << " Input shape: " << input->info()->tensor_shape()
1807 << " Output shape: " << output->info()->tensor_shape()
1808 << std::endl);
1809
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001810 return std::move(func);
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001811}
1812
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001813/** Create a backend softmax layer function
1814 *
1815 * @tparam SoftmaxLayerFunction Backend softmax function
1816 * @tparam TargetInfo Target-specific information
1817 *
1818 * @param[in] node Node to create the backend function for
1819 * @param[in] ctx Graph context
1820 *
1821 * @return Backend softmax layer function
1822 */
1823template <typename SoftmaxLayerFunction, typename TargetInfo>
1824std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
1825{
1826 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1827
1828 // Extract IO and info
1829 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1830 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1831 const float beta = node.beta();
1832 ARM_COMPUTE_ERROR_ON(input == nullptr);
1833 ARM_COMPUTE_ERROR_ON(output == nullptr);
1834
1835 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001836 auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001837 func->configure(input, output, beta);
1838
1839 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001840 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1841 << node.name()
1842 << " Type: " << node.type()
1843 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001844 << " Data Type: " << input->info()->data_type()
1845 << " Input shape: " << input->info()->tensor_shape()
1846 << " Output shape: " << output->info()->tensor_shape()
1847 << std::endl);
1848
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001849 return std::move(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001850}
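
// The beta parameter scales the logits before exponentiation; the configured
// function is expected to compute the usual scaled softmax:
//
//   out[i] = exp(beta * in[i]) / sum_j(exp(beta * in[j]))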
Michele Di Giorgioec699752019-03-22 15:25:32 +00001851
1852/** Create a backend stack layer function
1853 *
1854 * @tparam StackLayerFunction Backend stack function
1855 * @tparam TargetInfo Target-specific information
1856 *
1857 * @param[in] node Node to create the backend function for
1858 *
1859 * @return Backend stack layer function
1860 */
1861template <typename StackLayerFunction, typename TargetInfo>
1862std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
1863{
1864 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID: " << node.id() << " and Name: " << node.name() << std::endl);
1865 ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
1866
1867 // Extract IO and info
1868 std::vector<typename TargetInfo::TensorType *> inputs;
1869 for(unsigned int i = 0; i < node.num_inputs(); ++i)
1870 {
1871 inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
1872 }
1873 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1874 const int axis = node.axis();
1875
1876 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001877 auto func = std::make_unique<StackLayerFunction>();
Michele Di Giorgioec699752019-03-22 15:25:32 +00001878 func->configure(inputs, axis, output);
1879
1880 // Log info
1881 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1882 << node.name()
1883 << " Type: " << node.type()
1884 << " Target: " << TargetInfo::TargetType
1885 << " Data Type: " << output->info()->data_type()
1886 << " Inputs shape: " << inputs[0]->info()->tensor_shape()
1887 << " Output shape: " << output->info()->tensor_shape()
1888 << " Num Inputs: " << inputs.size()
1889 << " Axis: " << axis
1890 << std::endl);
1891
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001892 return std::move(func);
Michele Di Giorgioec699752019-03-22 15:25:32 +00001893}
thecha012bfadd92020-08-12 17:25:51 +01001894
1895/** Create a backend strided slice layer function
1896 *
1897 * @tparam StridedSliceLayerFunction Backend strided slice function
1898 * @tparam TargetInfo Target-specific information
1899 *
1900 * @param[in] node Node to create the backend function for
1901 *
1902 * @return Backend strided slice layer function
1903 */
1904template <typename StridedSliceLayerFunction, typename TargetInfo>
1905std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
1906{
1907 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1908
1909 // Extract IO and info
1910 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1911 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1912 Coordinates starts = node.starts();
1913 Coordinates ends = node.ends();
1914 BiStrides strides = node.strides();
1915 StridedSliceLayerInfo info = node.strided_slice_info();
1916
1917 ARM_COMPUTE_ERROR_ON(input == nullptr);
1918 ARM_COMPUTE_ERROR_ON(output == nullptr);
1919
1920 // Create and configure function
Georgios Pinitas40f51a62020-11-21 03:04:18 +00001921 auto func = std::make_unique<StridedSliceLayerFunction>();
thecha012bfadd92020-08-12 17:25:51 +01001922 func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());
1923
1924 // Log info
1925 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1926 << node.name()
1927 << " Type: " << node.type()
1928 << " Target: " << TargetInfo::TargetType
1929 << " Data Type: " << input->info()->data_type()
1930 << " Input shape: " << input->info()->tensor_shape()
1931 << " Output shape: " << output->info()->tensor_shape()
1932 << std::endl);
1933
Georgios Pinitas4d9687e2020-10-21 18:33:36 +01001934 return std::move(func);
thecha012bfadd92020-08-12 17:25:51 +01001935}
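
// Slice semantics (a sketch, following the TensorFlow-style convention these
// parameters model): for a 1-D tensor of 10 elements,
//
//   starts = { 0 }, ends = { 10 }, strides = { 2 } // -> elements 0, 2, 4, 6, 8
//
// A set bit i in begin_mask/end_mask makes the slice ignore starts[i]/ends[i]
// and use the fullest possible range, while a set bit in shrink_axis_mask
// drops dimension i from the output shape.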
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001936} // namespace detail
1937} // namespace backends
1938} // namespace graph
1939} // namespace arm_compute
1940
Michalis Spyrouf4643372019-11-29 16:17:13 +00001941#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */