/*
 * Copyright (c) 2018-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H

#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Tensor.h"
#include "arm_compute/graph/TypePrinter.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensorInfo.h"
#include "support/Cast.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
namespace detail
{
/** Returns backing tensor of a given tensor
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] tensor Tensor to extract the backing tensor from
 *
 * @return Backing tensor if present else nullptr
 */
template <typename TargetInfo>
typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
{
    typename TargetInfo::TensorType *backing_tensor = nullptr;
    if(tensor != nullptr)
    {
        ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
        // Get backing tensor handle
        ITensorHandle *tensor_handle = tensor->handle();
        // Get backing tensor
        backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
    }

    return backing_tensor;
}
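
// Note: the TargetInfo template parameter used throughout these helpers is a traits
// struct supplied by each backend. A minimal sketch of the contract these helpers
// rely on (illustrative only; the concrete definitions live in the backend function
// factories, e.g. the CL and NEON ones):
//
//   struct ExampleTargetInfo
//   {
//       using TensorType    = arm_compute::ITensor; // backend tensor type (e.g. ICLTensor for CL)
//       using SrcTensorType = TensorType;           // source tensor type used by concatenation
//       static Target TargetType;                   // target served by this backend (e.g. Target::CL)
//   };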

/** Validates a node
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] node                 Node to validate
 * @param[in] num_expected_inputs  Expected number of input tensors
 * @param[in] num_expected_outputs Expected number of output tensors
 */
template <typename TargetInfo>
void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
                                  << " Target: " << TargetInfo::TargetType
                                  << " ID: " << node.id()
                                  << node.name()
                                  << std::endl);

    ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
    ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
    ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
}

/** Creates a backend activation layer function
 *
 * @tparam ActivationLayerFunction Backend activation function
 * @tparam TargetInfo              Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend activation layer function
 */
template <typename ActivationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input    = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output   = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo        act_info = node.activation_info();

    // Create function
    auto func = std::make_unique<ActivationLayerFunction>();
    func->configure(input, output, act_info);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " a: " << act_info.a()
                               << " b: " << act_info.b()
                               << " InPlace : " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}
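
// Usage sketch: a backend's function factory typically switches on the node type and
// forwards to the matching creator above. Illustrative only -- CLActivationLayer and
// CLTargetInfo stand in for whatever function/traits types the backend provides:
//
//   case NodeType::ActivationLayer:
//       return detail::create_activation_layer<CLActivationLayer, CLTargetInfo>(
//           *arm_compute::utils::cast::polymorphic_downcast<ActivationLayerNode *>(node));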

/** Creates a backend argminmax layer function
 *
 * @tparam ArgMinMaxLayerFunction Backend argminmax function
 * @tparam TargetInfo             Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend argminmax layer function
 */
template <typename ArgMinMaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ReductionOperation         op     = node.reduction_operation();
    unsigned int                     axis   = node.axis();

    // Create function
    auto func = std::make_unique<ArgMinMaxLayerFunction>();
    func->configure(input, axis, output, op);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Reduction Operation: " << op
                               << " axis: " << axis
                               << std::endl);

    return std::move(func);
}

/** Create a backend batch normalization layer function
 *
 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend batch normalization layer function
 */
template <typename BatchNormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *var   = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *beta  = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));

    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      epsilon   = node.epsilon();
    const ActivationLayerInfo        fused_act = node.fused_activation();

    // Create and configure function
    auto func = std::make_unique<BatchNormalizationLayerFunction>();
    func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Epsilon: " << epsilon << " "
                               << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return std::move(func);
}

/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
                                    std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
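
// For reference, the fused function instantiated above folds the batch normalization
// statistics into the convolution at configure time; the standard inference-time
// folding (stated here for clarity, not a quote of the implementation) is:
//
//   w_fused = w * gamma / sqrt(var + epsilon)
//   b_fused = (b - mean) * gamma / sqrt(var + epsilon) + beta
//
// which is why the node carries seven inputs: input, weights, biases, mean, var, beta, gamma.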

/** Create a backend fused depthwise convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused depthwise convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();
    const float               epsilon          = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
                                    std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
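
// The create_named_memory_managed_function() helper used by the two fused creators above
// (declared in arm_compute/graph/backends/Utils.h) wraps a recurring pattern: construct the
// function with a memory manager, configure it with the forwarded arguments, and hand back a
// {function, name} pair. A rough sketch of its shape (illustrative, not necessarily the exact
// library signature):
//
//   template <typename FunctionType, typename FunctionNameType, typename MemoryManagerType, typename... ParameterType>
//   std::pair<std::unique_ptr<arm_compute::IFunction>, FunctionNameType>
//   create_named_memory_managed_function(FunctionNameType name, MemoryManagerType mm, ParameterType &&... args)
//   {
//       auto f = std::make_unique<FunctionType>(mm);
//       f->configure(std::forward<ParameterType>(args)...);
//       return std::make_pair(std::move(f), name);
//   }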

/** Create a backend bounding box transform layer function
 *
 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend bounding box transform layer function
 */
template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas    = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const BoundingBoxTransformInfo   bbox_info = node.info();

    // Create and configure function
    auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
    func->configure(input, output, deltas, bbox_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " BoundingBox Info img W: " << bbox_info.img_width() << " "
                               << " BoundingBox Info img H: " << bbox_info.img_height() << " "
                               << std::endl);

    return std::move(func);
}

/** Create a backend channel shuffle layer function
 *
 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend channel shuffle layer function
 */
template <typename ChannelShuffleLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const unsigned int               num_groups = node.num_groups();

    // Create function
    auto func = std::make_unique<ChannelShuffleLayerFunction>();
    func->configure(input, output, num_groups);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Num groups: " << num_groups
                               << std::endl);

    return std::move(func);
}

/** Create a backend layer concatenate function
 *
 * @tparam ConcatenateLayerFunction Backend concatenate function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend concatenate layer function
 */
template <typename ConcatenateLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Return nullptr if depth concatenate is switched off
    if(!node.is_enabled())
    {
        return nullptr;
    }

    // Extract IO and info
    std::vector<typename TargetInfo::SrcTensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DataLayout                 data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
    const size_t                     concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());

    // Create and configure function
    auto func = std::make_unique<ConcatenateLayerFunction>();
    func->configure(inputs, output, concat_axis);

    // Log info
    const bool         is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << concat_axis
                               << qss.str()
                               << std::endl);

    return std::move(func);
}
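
// Note on the axis conversion above: get_dimension_idx() maps the logical concatenation
// axis onto the dimension index of the backing tensor's layout. For example, a channel
// concatenation resolves to index 2 under NCHW ([W, H, C, N] dimension ordering) and to
// index 0 under NHWC ([C, W, H, N] ordering).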

/** Create a backend convolution layer function
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info      = node.convolution_info();
    const unsigned int        num_groups     = node.num_groups();
    const ConvolutionMethod   conv_algorithm = node.convolution_method();
    const bool                fast_math      = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act      = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    if(conv_algorithm == ConvolutionMethod::Winograd)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
                                        std::string("WinogradConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info, fused_act, fast_math);
    }
    else if(conv_algorithm == ConvolutionMethod::Direct)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
                                        std::string("DirectConvolutionLayer"),
                                        input, weights, biases, output, conv_info, fused_act);
    }
    else if(conv_algorithm == ConvolutionMethod::GEMM)
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
                                        std::string("GEMMConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
    }
    else
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
                                        std::string("GenericConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
    }

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
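
// The ConvolutionLayerFunctions template parameter is expected to bundle the backend's
// convolution variants under the four member names dispatched on above. A minimal sketch
// of that bundle, using the CL backend's function names for illustration:
//
//   struct ExampleConvolutionLayerFunctions
//   {
//       using GenericConvolutionLayer  = CLConvolutionLayer;
//       using GEMMConvolutionLayer     = CLGEMMConvolutionLayer;
//       using DirectConvolutionLayer   = CLDirectConvolutionLayer;
//       using WinogradConvolutionLayer = CLWinogradConvolutionLayer;
//   };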

/** Create a backend deconvolution layer function
 *
 * @tparam DeconvolutionLayerFunction Backend deconvolution function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend deconvolution layer function
 */
template <typename DeconvolutionLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo deconv_info = node.deconvolution_info();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;

    std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
                                      std::string(), mm,
                                      input, weights, biases, output, deconv_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);
    return func;
}

/** Create a backend layer depth-wise convolution function
 *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth-wise convolution layer function
 */
template <typename DepthwiseConvolutionLayer, typename TargetInfo>
std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::unique_ptr<IFunction> func;
    std::string                func_name;

    std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
                                    std::string("DepthwiseConvolutionLayer"),
                                    input, weights, biases, output, conv_info, depth_multiplier, fused_act);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Depth multiplier: " << depth_multiplier
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}

/** Create a backend depth to space layer function
 *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth to space layer function
 */
template <typename DepthToSpaceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DepthToSpaceLayerFunction>();
    func->configure(input, output, node.block_shape());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Block Size: " << node.block_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend dequantize layer function
 *
 * @tparam DequantizationLayerFunction Backend dequantize function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend dequantize layer function
 */
template <typename DequantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DequantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Input quantization info: " << input->info()->quantization_info()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend detection output layer function
 *
 * @tparam DetectionOutputLayerFunction Backend detection output function
 * @tparam TargetInfo                   Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection output layer function
 */
template <typename DetectionOutputLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DetectionOutputLayerInfo   detect_info = node.detection_output_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionOutputLayerFunction>();
    func->configure(input0, input1, input2, output, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " DetectionOutputLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend detection post process layer function
 *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection post process layer function
 */
template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType     *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType     *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType     *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType     *output0     = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType     *output1     = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType     *output2     = get_backing_tensor<TargetInfo>(node.output(2));
    typename TargetInfo::TensorType     *output3     = get_backing_tensor<TargetInfo>(node.output(3));
    const DetectionPostProcessLayerInfo  detect_info = node.detection_post_process_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output0 == nullptr);
    ARM_COMPUTE_ERROR_ON(output1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output3 == nullptr);

    // Create and configure function
    auto func = std::make_unique<DetectionPostProcessLayerFunction>();
    func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output0 shape: " << output0->info()->tensor_shape()
                               << " Output1 shape: " << output1->info()->tensor_shape()
                               << " Output2 shape: " << output2->info()->tensor_shape()
                               << " Output3 shape: " << output3->info()->tensor_shape()
                               << " DetectionPostProcessLayer info: " << detect_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend element-wise operation layer function
 *
 * @tparam EltwiseFunctions Backend element-wise function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend element-wise operation layer function
 */
template <typename EltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input1         = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input2         = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output         = get_backing_tensor<TargetInfo>(node.output(0));
    const EltwiseOperation           eltwise_op     = node.eltwise_operation();
    const ConvertPolicy              convert_policy = node.convert_policy();
    const ActivationLayerInfo        act_info       = node.fused_activation();
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == EltwiseOperation::Add)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
                                        std::string("ArithmeticAddition"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Sub)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
                                        std::string("ArithmeticSubtraction"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Mul)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
                                        std::string("PixelWiseMultiplication"),
                                        input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
    }
    else if(eltwise_op == EltwiseOperation::Max)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
                                        std::string("ElementwiseMaximum"),
                                        input1, input2, output, act_info);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input1->info()->data_type()
                               << " Shape: " << input1->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
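
// Similarly, the EltwiseFunctions template parameter bundles the backend's element-wise
// implementations under the member names dispatched on above. A minimal sketch, again
// using CL function names for illustration:
//
//   struct ExampleEltwiseFunctions
//   {
//       using Addition       = CLArithmeticAddition;
//       using Subtraction    = CLArithmeticSubtraction;
//       using Multiplication = CLPixelWiseMultiplication;
//       using Maximum        = CLElementwiseMax;
//   };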

/** Create a backend unary element-wise operation layer function
 *
 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend unary element-wise operation layer function
 */
template <typename UnaryEltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const UnaryEltwiseOperation      eltwise_op = node.eltwise_descriptor().op;

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == UnaryEltwiseOperation::Exp)
    {
        std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
                                        std::string("Exp"),
                                        input, output);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend flatten layer function
 *
 * @tparam FlattenLayerFunction Backend flatten function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend flatten layer function
 */
template <typename FlattenLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<FlattenLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend fully connected layer function
 *
 * @tparam FullyConnectedLayerFunction Backend fully-connected function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fully connected layer function
 */
template <typename FullyConnectedLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    const FullyConnectedLayerInfo    fc_info = node.info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(weights == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto wm   = get_weights_manager(ctx, TargetInfo::TargetType);
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
    func->configure(input, weights, biases, output, fc_info);

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << qss.str()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend generate proposals layer function
 *
 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
 * @tparam TargetInfo                     Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend generate proposals layer function
 */
template <typename GenerateProposalsLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *scores              = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas              = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *anchors             = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *proposals           = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType *scores_out          = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
    const GenerateProposalsInfo      info                = node.info();

    ARM_COMPUTE_ERROR_ON(scores == nullptr);
    ARM_COMPUTE_ERROR_ON(deltas == nullptr);
    ARM_COMPUTE_ERROR_ON(anchors == nullptr);
    ARM_COMPUTE_ERROR_ON(proposals == nullptr);
    ARM_COMPUTE_ERROR_ON(scores_out == nullptr);

    // Create and configure function
    auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
                               << " Target " << TargetInfo::TargetType
                               << " Data Type: " << scores->info()->data_type()
                               << " Scores shape: " << scores->info()->tensor_shape()
                               << " Deltas shape: " << deltas->info()->tensor_shape()
                               << " Anchors shape: " << anchors->info()->tensor_shape()
                               << " Proposals shape: " << proposals->info()->tensor_shape()
                               << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
                               << " Scores Out shape: " << scores_out->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend l2 normalization layer function
 *
 * @tparam L2NormalizeLayerFunction Backend l2 normalization function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend l2 normalization layer function
 */
template <typename L2NormalizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    int                              axis    = node.axis();
    float                            epsilon = node.epsilon();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
    func->configure(input, output, axis, epsilon);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Axis: " << axis
                               << " Epsilon: " << epsilon
                               << std::endl);

    return std::move(func);
}

/** Create a backend normalization layer function
 *
 * @tparam NormalizationLayerFunction Backend normalization function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend normalization layer function
 */
template <typename NormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
{
    ARM_COMPUTE_UNUSED(ctx);

    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const NormalizationLayerInfo     norm_info = node.normalization_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizationLayerFunction>();
    func->configure(input, output, norm_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Normalization info: " << norm_info.type()
                               << std::endl);

    return std::move(func);
}

/** Create a backend normalize planar YUV layer function
 *
 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend normalize planar YUV layer function
 */
template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *std    = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(mean == nullptr);
    ARM_COMPUTE_ERROR_ON(std == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
    func->configure(input, output, mean, std);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend pad layer function
 *
 * @tparam PadLayerFunction Backend pad function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pad layer function
 */
template <typename PadLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PaddingList               &padding   = node.padding();
    const PixelValue                 pad_value = node.pad_value();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PadLayerFunction>();
    func->configure(input, output, padding, pad_value);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend permute layer function
 *
 * @tparam PermuteLayerFunction Backend permute function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend permute layer function
 */
template <typename PermuteLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const PermutationVector         &perm   = node.permutation_vector();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PermuteLayerFunction>();
    func->configure(input, output, perm);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Permutation vector: " << perm
                               << std::endl);

    return std::move(func);
}

/** Create a backend pooling layer function
 *
 * @tparam PoolingLayerFunction Backend pooling function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend pooling layer function
 */
template <typename PoolingLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const PoolingLayerInfo           pool_info = node.pooling_info();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PoolingLayerFunction>();
    func->configure(input, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Pooling info: " << pool_info.pool_type
                               << std::endl);

    return std::move(func);
}

/** Create a backend PRelu layer function
 *
 * @tparam PReluFunction Backend PRelu function
 * @tparam TargetInfo    Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend PRelu layer function
 */
template <typename PReluFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *alpha  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PReluFunction>();
    func->configure(input, alpha, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend print layer function
 *
 * @tparam TargetInfo Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend print layer function
 */
template <typename TargetInfo>
std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_UNUSED(input);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << std::endl);

    return nullptr;
}
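
// Unlike the other helpers, create_print_layer intentionally returns nullptr:
// it only validates and logs the node, so callers must tolerate a null
// IFunction. A hypothetical call site (CLTargetInfo and print_node are
// assumed names) might look like:
//
//   std::unique_ptr<IFunction> func = detail::create_print_layer<CLTargetInfo>(print_node);
//   if(func == nullptr)
//   {
//       // Nothing to schedule at runtime for this node
//   }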

/** Create a backend priorbox layer function
 *
 * @tparam PriorBoxLayerFunction Backend priorbox function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend priorbox layer function
 */
template <typename PriorBoxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1     = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const PriorBoxLayerInfo          prior_info = node.priorbox_info();
    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<PriorBoxLayerFunction>();
    func->configure(input0, input1, output, prior_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " PriorBoxLayer info: " << prior_info
                               << std::endl);

    return std::move(func);
}

/** Create a backend quantization layer function
 *
 * @tparam QuantizationLayerFunction Backend quantization function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend quantization layer function
 */
template <typename QuantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<QuantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend reduction operation layer function
 *
 * @tparam ReductionOperationFunction Backend reduction operation function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend reduction operation layer function
 */
template <typename ReductionOperationFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    ReductionOperation               op        = node.op();
    int                              axis      = node.axis();
    bool                             keep_dims = node.keep_dims();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, axis, op, keep_dims);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Operation: " << op
                               << " Axis: " << axis
                               << " Keep dimensions: " << keep_dims
                               << std::endl);

    return std::move(func);
}
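
// For intuition on axis/keep_dims (generic reduction semantics, not specific
// to any one backend): reducing an input of shape [4, 3, 2] along axis 1
// yields [4, 1, 2] when keep_dims is true, and [4, 2] when the reduced
// dimension is dropped.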

/** Create a backend reorg layer function
 *
 * @tparam ReorgLayerFunction Backend reorg function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reorg layer function
 */
template <typename ReorgLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReorgLayerFunction>();
    func->configure(input, output, node.stride());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend reshape layer function
 *
 * @tparam ReshapeLayerFunction Backend reshape function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reshape layer function
 */
template <typename ReshapeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<ReshapeLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend resize layer function
 *
 * @tparam ResizeLayerFunction Backend resize function
 * @tparam TargetInfo          Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend resize layer function
 */
template <typename ResizeLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    const InterpolationPolicy policy = node.policy();

    // Create and configure function
    auto func = std::make_unique<ResizeLayerFunction>();
    func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT, PixelValue(), SamplingPolicy::CENTER, false, false });

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Interpolation: " << policy
                               << std::endl);

    return std::move(func);
}
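
// A note on the ScaleKernelInfo arguments pinned above (this reading of the
// positional parameters is an assumption based on the constructor order used
// here): the resize node only carries an interpolation policy, so the helper
// fixes the remaining behaviour to a constant border with a default pixel
// value, centre sampling, and the two trailing boolean flags disabled.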

/** Create a backend ROI align layer function
 *
 * @tparam ROIAlignLayerFunction ROI Align function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return ROI Align layer function
 */
template <typename ROIAlignLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *rois   = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);
    ARM_COMPUTE_ERROR_ON(rois == nullptr);

    const ROIPoolingLayerInfo pool_info = node.pooling_info();

    // Create and configure function
    auto func = std::make_unique<ROIAlignLayerFunction>();

    func->configure(input, rois, output, pool_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " ROIs shape: " << rois->info()->tensor_shape()
                               << " ROIPooling width: " << pool_info.pooled_width()
                               << " ROIPooling height: " << pool_info.pooled_height()
                               << std::endl);

    return std::move(func);
}

/** Create a backend slice layer function
 *
 * @tparam SliceLayerFunction Backend slice function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend slice layer function
 */
template <typename SliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SliceLayerFunction>();
    func->configure(input, output, node.starts(), node.ends());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend softmax layer function
 *
 * @tparam SoftmaxLayerFunction Backend softmax function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend softmax layer function
 */
template <typename SoftmaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      beta   = node.beta();
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(input, output, beta);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}

/** Create a backend stack layer function
 *
 * @tparam StackLayerFunction Backend stack function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend stack layer function
 */
template <typename StackLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Extract IO and info
    std::vector<typename TargetInfo::TensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const int                        axis   = node.axis();

    // Create and configure function
    auto func = std::make_unique<StackLayerFunction>();
    func->configure(inputs, axis, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Inputs shape: " << inputs[0]->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << axis
                               << std::endl);

    return std::move(func);
}
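
// For intuition on the axis parameter (generic stack semantics, as in
// np.stack, not specific to any one backend): stacking N inputs of identical
// shape inserts a new dimension of size N at position `axis`, e.g. four
// [W, H] tensors stacked at axis 0 become a [4, W, H] tensor.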

/** Create a backend strided slice layer function
 *
 * @tparam StridedSliceLayerFunction Backend strided slice function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend strided slice layer function
 */
template <typename StridedSliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    Coordinates                      starts  = node.starts();
    Coordinates                      ends    = node.ends();
    BiStrides                        strides = node.strides();
    StridedSliceLayerInfo            info    = node.strided_slice_info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = std::make_unique<StridedSliceLayerFunction>();
    func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
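
// Worked example of the strided-slice parameters (standard strided-slice
// semantics, analogous to NumPy's start:end:stride indexing): along one
// dimension, starts = 1, ends = 7 and strides = 2 select the elements at
// indices 1, 3 and 5. The begin/end mask bits mark dimensions whose start/end
// values are ignored in favour of the full range, and shrink_axis_mask drops
// the corresponding unit dimensions from the output.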
} // namespace detail
} // namespace backends
} // namespace graph
} // namespace arm_compute

#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */