/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H

#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Tensor.h"
#include "arm_compute/graph/TypePrinter.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensorInfo.h"
#include "support/Cast.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
namespace detail
{
/** Returns backing tensor of a given tensor
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] tensor Tensor to extract the backing tensor from
 *
 * @return Backing tensor if present else nullptr
 */
template <typename TargetInfo>
typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
{
    typename TargetInfo::TensorType *backing_tensor = nullptr;
    if(tensor != nullptr)
    {
        ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
        // Get backing tensor handle
        ITensorHandle *tensor_handle = tensor->handle();
        // Get backing tensor
        backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
    }

    return backing_tensor;
}

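// Illustrative only (not part of this header): a backend's TargetInfo bundles
// the target enum together with its concrete tensor types, which is what lets
// get_backing_tensor() resolve a graph tensor to a backend tensor. A
// hypothetical TargetInfo could look like:
//
//   struct MyTargetInfo
//   {
//       using TensorType    = arm_compute::ITensor;
//       using SrcTensorType = TensorType;
//       static constexpr Target TargetType = Target::NEON;
//   };
//
//   ITensor *backing = get_backing_tensor<MyTargetInfo>(node.input(0));
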
template <typename TargetInfo>
void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
                                  << " Target: " << TargetInfo::TargetType
                                  << " ID: " << node.id()
                                  << node.name()
                                  << std::endl);

    ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
    ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
    ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
}

/** Creates a backend activation layer function
 *
 * @tparam ActivationLayerFunction Backend activation function
 * @tparam TargetInfo              Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend activation layer function
 */
template <typename ActivationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input    = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output   = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo        act_info = node.activation_info();

    // Create function
    auto func = std::make_unique<ActivationLayerFunction>();
    func->configure(input, output, act_info);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " a: " << act_info.a()
                               << " b: " << act_info.b()
                               << " InPlace : " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}

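// Illustrative dispatch site (hypothetical; concrete backends do this in their
// function factories, with their own function and TargetInfo types):
//
//   case NodeType::ActivationLayer:
//       return detail::create_activation_layer<MyActivationLayer, MyTargetInfo>(
//           *polymorphic_downcast<ActivationLayerNode *>(node));
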
/** Creates a backend argminmax layer function
 *
 * @tparam ArgMinMaxLayerFunction Backend argminmax function
 * @tparam TargetInfo             Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend argminmax layer function
 */
template <typename ArgMinMaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ReductionOperation         op     = node.reduction_operation();
    unsigned int                     axis   = node.axis();

    // Create function
    auto func = std::make_unique<ArgMinMaxLayerFunction>();
    func->configure(input, axis, output, op);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Reduction Operation: " << op
                               << " axis: " << axis
                               << std::endl);

    return std::move(func);
}

/** Create a backend batch normalization layer function
 *
 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend batch normalization layer function
 */
template <typename BatchNormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *var   = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *beta  = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));

    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      epsilon   = node.epsilon();
    const ActivationLayerInfo        fused_act = node.fused_activation();

    // Create and configure function
    auto func = std::make_unique<BatchNormalizationLayerFunction>();
    func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Epsilon: " << epsilon << " "
                               << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}

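// For reference, the computation the five inputs above feed (standard batch
// normalization with an optional fused activation; stated as background, not
// taken from this file):
//
//   out = act( gamma * (in - mean) / sqrt(var + epsilon) + beta )
//
// which is why the node carries exactly five inputs (x, mean, var, beta,
// gamma) plus the scalar epsilon and a fused activation descriptor.
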
/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend fused depthwise convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused depthwise convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();
    const float               epsilon          = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend bounding box transform layer function
 *
 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend bounding box transform layer function
 */
template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas    = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const BoundingBoxTransformInfo   bbox_info = node.info();

    // Create and configure function
    auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
    func->configure(input, output, deltas, bbox_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " BoundingBox Info img W: " << bbox_info.img_width() << " "
                               << " BoundingBox Info img H: " << bbox_info.img_height() << " "
                               << std::endl);

    return std::move(func);
}

/** Create a backend channel shuffle layer function
 *
 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend channel shuffle layer function
 */
template <typename ChannelShuffleLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const unsigned int               num_groups = node.num_groups();

    // Create function
    auto func = std::make_unique<ChannelShuffleLayerFunction>();
    func->configure(input, output, num_groups);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Num groups: " << num_groups
                               << std::endl);

    return std::move(func);
}

/** Create a backend layer concatenate function
 *
 * @tparam ConcatenateLayerFunction Backend concatenate function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend concatenate layer function
 */
template <typename ConcatenateLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Return nullptr if depth concatenate is switched off
    if(!node.is_enabled())
    {
        return nullptr;
    }

    // Extract IO and info
    std::vector<typename TargetInfo::SrcTensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DataLayout                 data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
    const size_t                     concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());

    // Create and configure function
    auto func = std::make_unique<ConcatenateLayerFunction>();
    func->configure(inputs, output, concat_axis);

    // Log info
    const bool         is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << concat_axis
                               << qss.str()
                               << std::endl);

    return std::move(func);
}

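// Note on the axis conversion above: the node stores a layout-agnostic
// DataLayoutDimension, and get_dimension_idx() maps it onto a concrete
// dimension index in the output's data layout. For channel concatenation,
// for example:
//
//   get_dimension_idx(DataLayout::NCHW, DataLayoutDimension::CHANNEL) == 2
//   get_dimension_idx(DataLayout::NHWC, DataLayoutDimension::CHANNEL) == 0
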
/** Create a backend convolution layer function
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info      = node.convolution_info();
    const unsigned int        num_groups     = node.num_groups();
    const ConvolutionMethod   conv_algorithm = node.convolution_method();
    const bool                fast_math      = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act      = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    if(conv_algorithm == ConvolutionMethod::Winograd)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
                                        std::string("WinogradConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info, fused_act, fast_math);
    }
    else if(conv_algorithm == ConvolutionMethod::Direct)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
                                        std::string("DirectConvolutionLayer"),
                                        input, weights, biases, output, conv_info, fused_act);
    }
    else if(conv_algorithm == ConvolutionMethod::GEMM)
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
                                        std::string("GEMMConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
    }
    else
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
                                        std::string("GenericConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
    }

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

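// For orientation, ConvolutionLayerFunctions is expected to expose the four
// method-specific functions dispatched on above. A hypothetical bundle
// (illustrative names; each backend supplies its own types):
//
//   struct MyConvolutionLayerFunctions
//   {
//       using GenericConvolutionLayer  = MyConvolutionLayer;
//       using GEMMConvolutionLayer     = MyGEMMConvolutionLayer;
//       using DirectConvolutionLayer   = MyDirectConvolutionLayer;
//       using WinogradConvolutionLayer = MyWinogradConvolutionLayer;
//   };
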
/** Create a backend deconvolution layer function
 *
 * @tparam DeconvolutionLayerFunction Backend deconvolution function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend deconvolution layer function
 */
template <typename DeconvolutionLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo deconv_info = node.deconvolution_info();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;

    std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
                                      std::string(), mm,
                                      input, weights, biases, output, deconv_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);
    return func;
}

/** Create a backend layer depth-wise convolution function
 *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth-wise convolution layer function
 */
template <typename DepthwiseConvolutionLayer, typename TargetInfo>
std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::unique_ptr<IFunction> func;
    std::string                func_name;

    std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
                                    std::string("DepthwiseConvolutionLayer"),
                                    input, weights, biases, output, conv_info, depth_multiplier, fused_act);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Depth multiplier: " << depth_multiplier
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend depth to space layer function
 *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth to space layer function
 */
template <typename DepthToSpaceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DepthToSpaceLayerFunction>();
    func->configure(input, output, node.block_shape());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Block Size: " << node.block_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend dequantize layer function
 *
 * @tparam DequantizationLayerFunction Backend dequantize function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend dequantize layer function
 */
template <typename DequantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DequantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Input quantization info: " << input->info()->quantization_info()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
/** Create a backend detection output layer function
 *
 * @tparam DetectionOutputLayerFunction Backend detection output function
 * @tparam TargetInfo                   Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection output layer function
 */
template <typename DetectionOutputLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DetectionOutputLayerInfo   detect_info = node.detection_output_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionOutputLayerFunction>();
    func->configure(input0, input1, input2, output, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " DetectionOutputLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend detection post process layer function
 *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection post process layer function
 */
template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType    *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType    *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType    *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType    *output0     = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType    *output1     = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType    *output2     = get_backing_tensor<TargetInfo>(node.output(2));
    typename TargetInfo::TensorType    *output3     = get_backing_tensor<TargetInfo>(node.output(3));
    const DetectionPostProcessLayerInfo detect_info = node.detection_post_process_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output0 == nullptr);
    ARM_COMPUTE_ERROR_ON(output1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output3 == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionPostProcessLayerFunction>();
    func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output0 shape: " << output0->info()->tensor_shape()
                               << " Output1 shape: " << output1->info()->tensor_shape()
                               << " Output2 shape: " << output2->info()->tensor_shape()
                               << " Output3 shape: " << output3->info()->tensor_shape()
                               << " DetectionPostProcessLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend element-wise operation layer function
 *
 * @tparam EltwiseFunctions Backend element-wise function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend element-wise operation layer function
 */
template <typename EltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input1         = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input2         = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output         = get_backing_tensor<TargetInfo>(node.output(0));
    const EltwiseOperation           eltwise_op     = node.eltwise_operation();
    const ConvertPolicy              convert_policy = node.convert_policy();
    const ActivationLayerInfo        act_info       = node.fused_activation();
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == EltwiseOperation::Add)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
                                        std::string("ArithmeticAddition"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Sub)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
                                        std::string("ArithmeticSubtraction"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Mul)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
                                        std::string("PixelWiseMultiplication"),
                                        input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
    }
    else if(eltwise_op == EltwiseOperation::Max)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
                                        std::string("ElementwiseMaximum"),
                                        input1, input2, output, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Div)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Division>(
                                        std::string("ArithmeticDivision"),
                                        input1, input2, output, act_info);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input1->info()->data_type()
                               << " Shape: " << input1->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

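// Similarly, EltwiseFunctions is expected to name one backend function per
// operation handled above. A hypothetical bundle (illustrative names only):
//
//   struct MyEltwiseFunctions
//   {
//       using Addition       = MyArithmeticAddition;
//       using Subtraction    = MyArithmeticSubtraction;
//       using Multiplication = MyPixelWiseMultiplication;
//       using Maximum        = MyElementwiseMax;
//       using Division       = MyArithmeticDivision;
//   };
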
/** Create a backend unary element-wise operation layer function
 *
 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend unary element-wise operation layer function
 */
template <typename UnaryEltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const UnaryEltwiseOperation      eltwise_op = node.eltwise_descriptor().op;

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == UnaryEltwiseOperation::Exp)
    {
        std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
                                        std::string("Exp"),
                                        input, output);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend flatten layer function
 *
 * @tparam FlattenLayerFunction Backend flatten function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend flatten layer function
 */
template <typename FlattenLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<FlattenLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend fully connected layer function
 *
 * @tparam FullyConnectedLayerFunction Backend fully-connected function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fully connected layer function
 */
template <typename FullyConnectedLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    const FullyConnectedLayerInfo    fc_info = node.info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(weights == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto wm   = get_weights_manager(ctx, TargetInfo::TargetType);
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
    func->configure(input, weights, biases, output, fc_info);

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << qss.str()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

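// Context plumbing sketch (hypothetical setup; in practice the graph runtime
// registers these contexts when the graph is finalized). The weights manager
// lets the function reuse transformed weights across the graph, and the memory
// manager pools intermediate buffers:
//
//   GraphContext ctx;
//   // ... memory/weights management contexts registered per target ...
//   auto fc_func = detail::create_fully_connected_layer<MyFullyConnectedLayer,
//                                                       MyTargetInfo>(fc_node, ctx);
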
/** Create a backend generate proposals layer function
 *
 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
 * @tparam TargetInfo                     Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend generate proposals layer function
 */
template <typename GenerateProposalsLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *scores              = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas              = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *anchors             = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *proposals           = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType *scores_out          = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
    const GenerateProposalsInfo      info                = node.info();

    ARM_COMPUTE_ERROR_ON(scores == nullptr);
    ARM_COMPUTE_ERROR_ON(deltas == nullptr);
    ARM_COMPUTE_ERROR_ON(anchors == nullptr);
    ARM_COMPUTE_ERROR_ON(proposals == nullptr);
    ARM_COMPUTE_ERROR_ON(scores_out == nullptr);
    ARM_COMPUTE_ERROR_ON(num_valid_proposals == nullptr);

    // Create and configure function
    auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << scores->info()->data_type()
                               << " Scores shape: " << scores->info()->tensor_shape()
                               << " Deltas shape: " << deltas->info()->tensor_shape()
                               << " Anchors shape: " << anchors->info()->tensor_shape()
                               << " Proposals shape: " << proposals->info()->tensor_shape()
                               << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
                               << " Scores Out shape: " << scores_out->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

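/* A minimal sketch of how a backend's function factory typically routes a node
 * to a helper in this file; CLGenerateProposalsLayer and CLTargetInfo are shown
 * for illustration only and are assumptions, not requirements of this header:
 *
 *   case NodeType::GenerateProposalsLayer:
 *       return detail::create_generate_proposals_layer<CLGenerateProposalsLayer, CLTargetInfo>(
 *           *polymorphic_downcast<GenerateProposalsLayerNode *>(node), ctx);
 */
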
/** Create a backend L2 normalization layer function
 *
 * @tparam L2NormalizeLayerFunction Backend L2 normalization function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend L2 normalization layer function
 */
template <typename L2NormalizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    int                              axis    = node.axis();
    float                            epsilon = node.epsilon();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
    func->configure(input, output, axis, epsilon);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Axis: " << axis
                               << " Epsilon: " << epsilon
                               << std::endl);

    return std::move(func);
}

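/* For reference, the configured operation normalizes along the chosen axis:
 * out = x / sqrt(max(sum(x^2), epsilon)), with epsilon guarding against
 * division by zero on all-zero slices.
 */
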
/** Create a backend normalization layer function
 *
 * @tparam NormalizationLayerFunction Backend normalization function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend normalization layer function
 */
template <typename NormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
{
    ARM_COMPUTE_UNUSED(ctx);

    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const NormalizationLayerInfo     norm_info = node.normalization_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizationLayerFunction>();
    func->configure(input, output, norm_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Normalization info: " << norm_info.type()
                               << std::endl);

    return std::move(func);
}

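/* Usage note: the node's NormalizationLayerInfo carries the window type and
 * shaping parameters, e.g. NormalizationLayerInfo(NormType::CROSS_MAP, 5,
 * 0.0001f, 0.75f) for a classic AlexNet-style LRN (values are illustrative).
 */
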
/** Create a backend normalize planar YUV layer function
 *
 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend normalize planar YUV layer function
 */
template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *std    = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(mean == nullptr);
    ARM_COMPUTE_ERROR_ON(std == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
    func->configure(input, output, mean, std);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend pad layer function
 *
 * @tparam PadLayerFunction Backend pad function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pad layer function
 */
template <typename PadLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PaddingList               &padding   = node.padding();
    const PixelValue                 pad_value = node.pad_value();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PadLayerFunction>();
    func->configure(input, output, padding, pad_value);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend permute layer function
 *
 * @tparam PermuteLayerFunction Backend permute function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend permute layer function
 */
template <typename PermuteLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const PermutationVector         &perm   = node.permutation_vector();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PermuteLayerFunction>();
    func->configure(input, output, perm);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Permutation vector: " << perm
                               << std::endl);

    return std::move(func);
}

/** Create a backend pooling layer function
 *
 * @tparam PoolingLayerFunction Backend pooling function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pooling layer function
 */
template <typename PoolingLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PoolingLayerInfo           pool_info = node.pooling_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PoolingLayerFunction>();
    func->configure(input, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Pooling info: " << pool_info.pool_type
                               << std::endl);

    return std::move(func);
}

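/* Usage sketch, assuming a CPU backend that exposes NEPoolingLayer and an
 * NETargetInfo trait (both names are assumptions for illustration):
 *
 *   std::unique_ptr<IFunction> fn =
 *       detail::create_pooling_layer<NEPoolingLayer, NETargetInfo>(pool_node);
 *   fn->run();
 */
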
/** Create a backend PRelu layer function
 *
 * @tparam PReluFunction Backend PRelu function
 * @tparam TargetInfo    Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend PRelu layer function
 */
template <typename PReluFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *alpha  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PReluFunction>();
    func->configure(input, alpha, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend print layer function
 *
 * @tparam TargetInfo Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend print layer function
 */
template <typename TargetInfo>
std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_UNUSED(input);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << std::endl);

    return nullptr;
}

/** Create a backend priorbox layer function
 *
 * @tparam PriorBoxLayerFunction Backend priorbox function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend priorbox layer function
 */
template <typename PriorBoxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1     = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const PriorBoxLayerInfo          prior_info = node.priorbox_info();
    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PriorBoxLayerFunction>();
    func->configure(input0, input1, output, prior_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " PriorBoxLayer info: " << prior_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend quantization layer function
 *
 * @tparam QuantizationLayerFunction Backend quantization function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend quantization layer function
 */
template <typename QuantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<QuantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

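/* Note: no quantization parameters are passed to configure(); the target scale
 * and offset are taken from the output tensor's QuantizationInfo, which the
 * graph is expected to have set on the output descriptor beforehand.
 */
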
/** Create a backend reduction operation layer function
 *
 * @tparam ReductionOperationFunction Backend reduction operation function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend reduction operation layer function
 */
template <typename ReductionOperationFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    ReductionOperation               op        = node.op();
    int                              axis      = node.axis();
    bool                             keep_dims = node.keep_dims();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, axis, op, keep_dims);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Operation: " << op
                               << " Axis: " << axis
                               << " Keep dimensions: " << keep_dims
                               << std::endl);

    return std::move(func);
}

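/* Shape semantics of the parameters above, for reference: reducing a (W, H, C)
 * input along axis 0 yields (1, H, C) when keep_dims is true and (H, C) when
 * it is false.
 */
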
/** Create a backend reorg layer function
 *
 * @tparam ReorgLayerFunction Backend reorg function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reorg layer function
 */
template <typename ReorgLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReorgLayerFunction>();
    func->configure(input, output, node.stride());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend reshape layer function
 *
 * @tparam ReshapeLayerFunction Backend reshape function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reshape layer function
 */
template <typename ReshapeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReshapeLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend resize layer function
 *
 * @tparam ResizeLayerFunction Backend resize function
 * @tparam TargetInfo          Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend resize layer function
 */
template <typename ResizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    const InterpolationPolicy policy = node.policy();

    // Create and configure function
    auto func = std::make_unique<ResizeLayerFunction>();
    func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT, PixelValue(), SamplingPolicy::CENTER, false, false });

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Interpolation: " << policy
                               << std::endl);

    return std::move(func);
}

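/* Note: the resize node only carries an interpolation policy, so the remaining
 * ScaleKernelInfo fields above are fixed defaults: constant border, centre
 * sampling, and both boolean options left disabled.
 */
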
/** Create a backend ROI align layer function
 *
 * @tparam ROIAlignLayerFunction ROI Align function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return ROI Align layer function
 */
template <typename ROIAlignLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *rois   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    ARM_COMPUTE_ERROR_ON(rois == nullptr);

    const ROIPoolingLayerInfo pool_info = node.pooling_info();

    // Create and configure function
    auto func = std::make_unique<ROIAlignLayerFunction>();

    func->configure(input, rois, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " ROIs shape: " << rois->info()->tensor_shape()
                               << " ROIPooling width: " << pool_info.pooled_width()
                               << " ROIPooling height: " << pool_info.pooled_height()
                               << std::endl);

    return std::move(func);
}

/** Create a backend slice layer function
 *
 * @tparam SliceLayerFunction Backend slice function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend slice layer function
 */
template <typename SliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SliceLayerFunction>();
    func->configure(input, output, node.starts(), node.ends());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend softmax layer function
 *
 * @tparam SoftmaxLayerFunction Backend softmax function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend softmax layer function
 */
template <typename SoftmaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      beta   = node.beta();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, beta);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

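/* For reference, beta scales the logits before normalization:
 * softmax(x)_i = exp(beta * x_i) / sum_j exp(beta * x_j), so beta = 1 gives
 * the standard softmax.
 */
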
/** Create a backend stack layer function
 *
 * @tparam StackLayerFunction Backend stack function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend stack layer function
 */
template <typename StackLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Extract IO and info
    std::vector<typename TargetInfo::TensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const int                        axis   = node.axis();

    // Create and configure function
    auto func = std::make_unique<StackLayerFunction>();
    func->configure(inputs, axis, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Inputs shape: " << inputs[0]->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << axis
                               << std::endl);

    return std::move(func);
}

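/* Shape semantics, for reference: stacking N rank-R inputs of identical shape
 * along `axis` yields one rank-(R+1) output, e.g. four (2, 3) tensors stacked
 * on axis 0 produce a (4, 2, 3) output.
 */
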
/** Create a backend strided slice layer function
 *
 * @tparam StridedSliceLayerFunction Backend strided slice function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend strided slice layer function
 */
template <typename StridedSliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    Coordinates                      starts  = node.starts();
    Coordinates                      ends    = node.ends();
    BiStrides                        strides = node.strides();
    StridedSliceLayerInfo            info    = node.strided_slice_info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<StridedSliceLayerFunction>();
    func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
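
/* Parameter semantics, for reference: each dimension keeps the elements in
 * [start, end) stepped by its stride, so starts = (1, 0), ends = (5, 4) and
 * strides = (2, 2) turn a (6, 4) input into a (2, 2) output; the
 * begin/end/shrink-axis masks override individual coordinates, following the
 * TensorFlow convention.
 */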
} // namespace detail
} // namespace backends
} // namespace graph
} // namespace arm_compute

#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */