/*
 * Copyright (c) 2018-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
#define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H

#include "arm_compute/graph/Logger.h"
#include "arm_compute/graph/Tensor.h"
#include "arm_compute/graph/TypePrinter.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/Utils.h"
#include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
#include "arm_compute/graph/backends/Utils.h"
#include "arm_compute/graph/nodes/Nodes.h"

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensorInfo.h"
#include "arm_compute/core/utils/misc/Cast.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
namespace detail
{
// Address rule DR-9R5 (1579. Return by converting move constructor)
#if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5))
#define RETURN_UNIQUE_PTR(x) (x)
#else /* defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5)) */
#define RETURN_UNIQUE_PTR(x) (std::move(x))
#endif /* defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5)) */
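
// Note: GCC releases before 5 reject returning a local std::unique_ptr through an implicit
// conversion without an explicit std::move, so RETURN_UNIQUE_PTR(func) expands to
// (std::move(func)) there and to a plain (func) elsewhere. A minimal illustration of the
// intended use (hypothetical helper and function type, not part of this header):
//
//   std::unique_ptr<IFunction> make_example_function()
//   {
//       auto func = support::cpp14::make_unique<SomeBackendFunction>(); // SomeBackendFunction is assumed
//       return RETURN_UNIQUE_PTR(func);                                 // moves on old GCC, plain return otherwise
//   }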

/** Returns backing tensor of a given tensor
 *
 * @tparam TargetInfo Target information
 *
 * @param[in] tensor Tensor to extract the backing tensor from
 *
 * @return Backing tensor if present else nullptr
 */
template <typename TargetInfo>
typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
{
    typename TargetInfo::TensorType *backing_tensor = nullptr;
    if(tensor != nullptr)
    {
        ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
        // Get backing tensor handle
        ITensorHandle *tensor_handle = tensor->handle();
        // Get backing tensor
        backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
    }

    return backing_tensor;
}
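
// All helpers in this file are parametrized on a TargetInfo policy type that provides a
// TensorType typedef naming the backend tensor class and a TargetType constant naming the
// graph target the tensors must live on. A minimal sketch of such a policy (illustrative
// only; the concrete backends define their own equivalents and the names below are assumed):
//
//   struct ExampleTargetInfo
//   {
//       using TensorType                   = arm_compute::ITensor; // backend tensor class
//       static constexpr Target TargetType = Target::NEON;         // graph target to validate against
//   };
//
//   // get_backing_tensor<ExampleTargetInfo>(node.input(0)) then yields an ITensor* or nullptr.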

template <typename TargetInfo>
void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
                                  << " Target: " << TargetInfo::TargetType
                                  << " ID: " << node.id()
                                  << node.name()
                                  << std::endl);

    ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
    ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
    ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
}

/** Creates a backend activation layer function
 *
 * @tparam ActivationLayerFunction Backend activation function
 * @tparam TargetInfo              Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend activation layer function
 */
template <typename ActivationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input    = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output   = get_backing_tensor<TargetInfo>(node.output(0));
    const ActivationLayerInfo        act_info = node.activation_info();

    // Create function
    auto func = support::cpp14::make_unique<ActivationLayerFunction>();
    func->configure(input, output, act_info);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Activation function: " << act_info.activation()
                               << " a: " << act_info.a()
                               << " b: " << act_info.b()
                               << " InPlace : " << is_in_place_operation(input, output)
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
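
// The per-backend function factories dispatch on node type and instantiate these creators with
// their own function and TargetInfo types. A hedged sketch of such a dispatch (the backend
// class names below are assumptions for illustration, not definitions from this header):
//
//   case NodeType::ActivationLayer:
//       return detail::create_activation_layer<SomeBackendActivationLayer, SomeTargetInfo>(
//           *arm_compute::utils::cast::polymorphic_downcast<ActivationLayerNode *>(node));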

/** Creates a backend argminmax layer function
 *
 * @tparam ArgMinMaxLayerFunction Backend argminmax function
 * @tparam TargetInfo             Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend argminmax layer function
 */
template <typename ArgMinMaxLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    const ReductionOperation         op     = node.reduction_operation();
    unsigned int                     axis   = node.axis();

    // Create function
    auto func = support::cpp14::make_unique<ArgMinMaxLayerFunction>();
    func->configure(input, axis, output, op);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Reduction Operation: " << op
                               << " axis: " << axis
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend batch normalization layer function
 *
 * @tparam BatchNormalizationLayerFunction Backend batch normalization function
 * @tparam TargetInfo                      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend batch normalization layer function
 */
template <typename BatchNormalizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *mean  = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *var   = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *beta  = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));

    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const float                      epsilon   = node.epsilon();
    const ActivationLayerInfo        fused_act = node.fused_activation();

    // Create and configure function
    auto func = support::cpp14::make_unique<BatchNormalizationLayerFunction>();
    func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Epsilon: " << epsilon << " "
                               << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
                               << " InPlace: " << is_in_place_operation(input, output)
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
                                    std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend fused depthwise convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused depthwise convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();
    const float               epsilon          = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
                                    std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend bounding box transform layer function
 *
 * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend bounding box transform layer function
 */
template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input     = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas    = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output    = get_backing_tensor<TargetInfo>(node.output(0));
    const BoundingBoxTransformInfo   bbox_info = node.info();

    // Create and configure function
    auto func = support::cpp14::make_unique<BoundingBoxTransformLayerFunction>();
    func->configure(input, output, deltas, bbox_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " BoundingBox Info img W: " << bbox_info.img_width() << " "
                               << " BoundingBox Info img H: " << bbox_info.img_height() << " "
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend channel shuffle layer function
 *
 * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend channel shuffle layer function
 */
template <typename ChannelShuffleLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const unsigned int               num_groups = node.num_groups();

    // Create function
    auto func = support::cpp14::make_unique<ChannelShuffleLayerFunction>();
    func->configure(input, output, num_groups);

    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << " Num groups: " << num_groups
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend layer concatenate function
 *
 * @tparam ConcatenateLayerFunction Backend concatenate function
 * @tparam TargetInfo               Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend concatenate layer function
 */
template <typename ConcatenateLayerFunction, typename TargetInfo>
std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
{
    ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
    ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);

    // Return nullptr if depth concatenate is switched off
    if(!node.is_enabled())
    {
        return nullptr;
    }

    // Extract IO and info
    std::vector<typename TargetInfo::SrcTensorType *> inputs;
    for(unsigned int i = 0; i < node.num_inputs(); ++i)
    {
        inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
    }
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DataLayout                 data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
    const size_t                     concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());

    // Create and configure function
    auto func = support::cpp14::make_unique<ConcatenateLayerFunction>();
    func->configure(inputs, output, concat_axis);

    // Log info
    const bool         is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << output->info()->data_type()
                               << " Shape: " << output->info()->tensor_shape()
                               << " Num Inputs: " << inputs.size()
                               << " Axis: " << concat_axis
                               << qss.str()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend convolution layer function
 *
 * @tparam ConvolutionLayerFunctions Backend convolution functions
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend convolution layer function
 */
template <typename ConvolutionLayerFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info      = node.convolution_info();
    const unsigned int        num_groups     = node.num_groups();
    const ConvolutionMethod   conv_algorithm = node.convolution_method();
    const bool                fast_math      = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act      = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    if(conv_algorithm == ConvolutionMethod::Winograd)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
                                        std::string("WinogradConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info, fused_act, fast_math);
    }
    else if(conv_algorithm == ConvolutionMethod::Direct)
    {
        ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
        std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
                                        std::string("DirectConvolutionLayer"),
                                        input, weights, biases, output, conv_info, fused_act);
    }
    else if(conv_algorithm == ConvolutionMethod::GEMM)
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
                                        std::string("GEMMConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
    }
    else
    {
        std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
                                        std::string("GenericConvolutionLayer"), mm,
                                        input, weights, biases, output, conv_info,
                                        WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
    }

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Groups: " << num_groups
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return RETURN_UNIQUE_PTR(func);
}
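
// ConvolutionLayerFunctions is expected to be a small traits bundle that names one backend
// function per convolution-method branch above. A hedged sketch of its shape (the concrete
// backends supply their own bundles; the right-hand names below are placeholders, not
// definitions from this header):
//
//   struct ExampleConvolutionLayerFunctions
//   {
//       using GenericConvolutionLayer  = SomeConvolutionLayer;          // fallback path
//       using GEMMConvolutionLayer     = SomeGEMMConvolutionLayer;      // ConvolutionMethod::GEMM
//       using DirectConvolutionLayer   = SomeDirectConvolutionLayer;    // ConvolutionMethod::Direct
//       using WinogradConvolutionLayer = SomeWinogradConvolutionLayer;  // ConvolutionMethod::Winograd
//   };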

/** Create a backend deconvolution layer function
 *
 * @tparam DeconvolutionLayerFunction Backend deconvolution function
 * @tparam TargetInfo                 Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend deconvolution layer function
 */
template <typename DeconvolutionLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo deconv_info = node.deconvolution_info();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;

    std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
                                      std::string(), mm,
                                      input, weights, biases, output, deconv_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);
    return func;
}

/** Create a backend layer depth-wise convolution function
 *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth-wise convolution layer function
 */
template <typename DepthwiseConvolutionLayer, typename TargetInfo>
std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    if(is_quantized)
    {
        biases->info()->set_data_type(DataType::S32);
    }

    const PadStrideInfo       conv_info        = node.convolution_info();
    const unsigned int        depth_multiplier = node.depth_multiplier();
    const ActivationLayerInfo fused_act        = node.fused_activation();

    // Create and configure function (we assume that functions have been validated before creation)
    std::unique_ptr<IFunction> func;
    std::string                func_name;

    std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
                                    std::string("DepthwiseConvolutionLayer"),
                                    input, weights, biases, output, conv_info, depth_multiplier, fused_act);

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << func_name
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " Depth multiplier: " << depth_multiplier
                               << qss.str()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend depth to space layer function
 *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend depth to space layer function
 */
template <typename DepthToSpaceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<DepthToSpaceLayerFunction>();
    func->configure(input, output, node.block_shape());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Block Size: " << node.block_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend dequantize layer function
 *
 * @tparam DequantizationLayerFunction Backend dequantize function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend dequantize layer function
 */
template <typename DequantizationLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<DequantizationLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Input quantization info: " << output->info()->quantization_info()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
/** Create a backend detection output layer function
 *
 * @tparam DetectionOutputLayerFunction Backend detection output function
 * @tparam TargetInfo                   Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection output layer function
 */
template <typename DetectionOutputLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output      = get_backing_tensor<TargetInfo>(node.output(0));
    const DetectionOutputLayerInfo   detect_info = node.detection_output_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<DetectionOutputLayerFunction>();
    func->configure(input0, input1, input2, output, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << " DetectionOutputLayer info: " << detect_info
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend detection post process layer function
 *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
 * @tparam TargetInfo                        Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend detection post process layer function
 */
template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType     *input0      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType     *input1      = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType     *input2      = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType     *output0     = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType     *output1     = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType     *output2     = get_backing_tensor<TargetInfo>(node.output(2));
    typename TargetInfo::TensorType     *output3     = get_backing_tensor<TargetInfo>(node.output(3));
    const DetectionPostProcessLayerInfo  detect_info = node.detection_post_process_info();

    ARM_COMPUTE_ERROR_ON(input0 == nullptr);
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output0 == nullptr);
    ARM_COMPUTE_ERROR_ON(output1 == nullptr);
    ARM_COMPUTE_ERROR_ON(output2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output3 == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<DetectionPostProcessLayerFunction>();
    func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input0->info()->data_type()
                               << " Input0 shape: " << input0->info()->tensor_shape()
                               << " Input1 shape: " << input1->info()->tensor_shape()
                               << " Input2 shape: " << input2->info()->tensor_shape()
                               << " Output0 shape: " << output0->info()->tensor_shape()
                               << " Output1 shape: " << output1->info()->tensor_shape()
                               << " Output2 shape: " << output2->info()->tensor_shape()
                               << " Output3 shape: " << output3->info()->tensor_shape()
                               << " DetectionPostProcessLayer info: " << detect_info
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend element-wise operation layer function
 *
 * @tparam EltwiseFunctions Backend element-wise function
 * @tparam TargetInfo       Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend element-wise operation layer function
 */
template <typename EltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input1         = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *input2         = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *output         = get_backing_tensor<TargetInfo>(node.output(0));
    const EltwiseOperation           eltwise_op     = node.eltwise_operation();
    const ConvertPolicy              convert_policy = node.convert_policy();
    const ActivationLayerInfo        act_info       = node.fused_activation();
    ARM_COMPUTE_ERROR_ON(input1 == nullptr);
    ARM_COMPUTE_ERROR_ON(input2 == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == EltwiseOperation::Add)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
                                        std::string("ArithmeticAddition"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Sub)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
                                        std::string("ArithmeticSubtraction"),
                                        input1, input2, output, convert_policy, act_info);
    }
    else if(eltwise_op == EltwiseOperation::Mul)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
                                        std::string("PixelWiseMultiplication"),
                                        input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
    }
    else if(eltwise_op == EltwiseOperation::Max)
    {
        std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
                                        std::string("ElementwiseMaximum"),
                                        input1, input2, output, act_info);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input1->info()->data_type()
                               << " Shape: " << input1->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
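
// As with the convolution bundle above, EltwiseFunctions is a traits type that maps each
// supported EltwiseOperation to a backend function. A hedged sketch (the right-hand names are
// placeholders supplied by the concrete backends, not by this header):
//
//   struct ExampleEltwiseFunctions
//   {
//       using Addition       = SomeArithmeticAddition;       // EltwiseOperation::Add
//       using Subtraction    = SomeArithmeticSubtraction;    // EltwiseOperation::Sub
//       using Multiplication = SomePixelWiseMultiplication;  // EltwiseOperation::Mul
//       using Maximum        = SomeElementwiseMax;           // EltwiseOperation::Max
//   };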

/** Create a backend unary element-wise operation layer function
 *
 * @tparam UnaryEltwiseFunctions Backend unary element-wise function
 * @tparam TargetInfo            Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend unary element-wise operation layer function
 */
template <typename UnaryEltwiseFunctions, typename TargetInfo>
std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input      = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output     = get_backing_tensor<TargetInfo>(node.output(0));
    const UnaryEltwiseOperation      eltwise_op = node.eltwise_descriptor().op;

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    std::unique_ptr<IFunction> func = nullptr;
    std::string                func_name;
    if(eltwise_op == UnaryEltwiseOperation::Exp)
    {
        std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
                                        std::string("Exp"),
                                        input, output);
    }
    else
    {
        ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
    }

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Operation: " << func_name
                               << " Data Type: " << input->info()->data_type()
                               << " Shape: " << input->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend flatten layer function
 *
 * @tparam FlattenLayerFunction Backend flatten function
 * @tparam TargetInfo           Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend flatten layer function
 */
template <typename FlattenLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<FlattenLayerFunction>();
    func->configure(input, output);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend fully connected layer function
 *
 * @tparam FullyConnectedLayerFunction Backend fully-connected function
 * @tparam TargetInfo                  Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fully connected layer function
 */
template <typename FullyConnectedLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *output  = get_backing_tensor<TargetInfo>(node.output(0));
    const FullyConnectedLayerInfo    fc_info = node.info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(weights == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function
    auto wm   = get_weights_manager(ctx, TargetInfo::TargetType);
    auto mm   = get_memory_manager(ctx, TargetInfo::TargetType);
    auto func = support::cpp14::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
    func->configure(input, weights, biases, output, fc_info);

    const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());

    // Log info
    std::ostringstream qss;
    if(is_quantized)
    {
        qss << " Input QuantInfo: " << input->info()->quantization_info()
            << " Weights QuantInfo: " << weights->info()->quantization_info()
            << " Output QuantInfo: " << output->info()->quantization_info();
    }
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << qss.str()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}

/** Create a backend generate proposals layer function
 *
 * @tparam GenerateProposalsLayerFunction Backend generate proposals function
 * @tparam TargetInfo                     Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend generate proposals layer function
 */
template <typename GenerateProposalsLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *scores              = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *deltas              = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *anchors             = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *proposals           = get_backing_tensor<TargetInfo>(node.output(0));
    typename TargetInfo::TensorType *scores_out          = get_backing_tensor<TargetInfo>(node.output(1));
    typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
    const GenerateProposalsInfo      info                = node.info();

    ARM_COMPUTE_ERROR_ON(scores == nullptr);
    ARM_COMPUTE_ERROR_ON(deltas == nullptr);
    ARM_COMPUTE_ERROR_ON(anchors == nullptr);
    ARM_COMPUTE_ERROR_ON(proposals == nullptr);
    ARM_COMPUTE_ERROR_ON(scores_out == nullptr);

    // Create and configure function
    auto func = support::cpp14::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
    func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
                               << " Target " << TargetInfo::TargetType
                               << " Data Type: " << scores->info()->data_type()
                               << " Scores shape: " << scores->info()->tensor_shape()
                               << " Deltas shape: " << deltas->info()->tensor_shape()
                               << " Anchors shape: " << anchors->info()->tensor_shape()
                               << " Proposals shape: " << proposals->info()->tensor_shape()
                               << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
                               << " Scores Out shape: " << scores_out->info()->tensor_shape()
                               << std::endl);

    return RETURN_UNIQUE_PTR(func);
}
1091
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001092/** Create a backend normalization layer function
1093 *
1094 * @tparam NormalizationLayerFunction Backend normalization function
1095 * @tparam TargetInfo Target-specific information
1096 *
1097 * @param[in] node Node to create the backend function for
1098 * @param[in] ctx Graph context
1099 *
1100 * @return Backend normalization layer function
1101 */
1102template <typename NormalizationLayerFunction, typename TargetInfo>
1103std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
1104{
1105 ARM_COMPUTE_UNUSED(ctx);
1106
1107 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1108
1109 // Extract IO and info
1110 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1111 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1112 const NormalizationLayerInfo norm_info = node.normalization_info();
1113 ARM_COMPUTE_ERROR_ON(input == nullptr);
1114 ARM_COMPUTE_ERROR_ON(output == nullptr);
1115
1116 // Create and configure function
1117 auto func = support::cpp14::make_unique<NormalizationLayerFunction>();
1118 func->configure(input, output, norm_info);
1119
1120 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001121 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1122 << node.name()
1123 << " Type: " << node.type()
1124 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001125 << " Data Type: " << input->info()->data_type()
1126 << " Input shape: " << input->info()->tensor_shape()
1127 << " Output shape: " << output->info()->tensor_shape()
1128 << " Normalization info: " << norm_info.type()
1129 << std::endl);
1130
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001131 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001132}
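
// Editorial sketch: the NormalizationLayerInfo consumed above is attached to
// the node by the graph frontend. An AlexNet-style cross-map local response
// normalisation would carry values along these lines (illustrative only):
//
//   NormalizationLayerInfo norm_info(NormType::CROSS_MAP, 5 /* norm_size */,
//                                    0.0001f /* alpha */, 0.75f /* beta */);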
1133
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001134/** Create a backend normalize planar YUV layer function
1135 *
1136 * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
1137 * @tparam TargetInfo Target-specific information
1138 *
1139 * @param[in] node Node to create the backend function for
1140 *
1141 * @return Backend normalize planar YUV layer function
1142 */
1143template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
1144std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
1145{
1146 validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1147
1148 // Extract IO and info
1149 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1150 typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
1151 typename TargetInfo::TensorType *std = get_backing_tensor<TargetInfo>(node.input(2));
1152 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1153 ARM_COMPUTE_ERROR_ON(input == nullptr);
1154 ARM_COMPUTE_ERROR_ON(mean == nullptr);
1155 ARM_COMPUTE_ERROR_ON(std == nullptr);
1156 ARM_COMPUTE_ERROR_ON(output == nullptr);
1157
1158 // Create and configure function
1159 auto func = support::cpp14::make_unique<NormalizePlanarYUVLayerFunction>();
1160 func->configure(input, output, mean, std);
1161
1162 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001163 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1164 << node.name()
1165 << " Type: " << node.type()
1166 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001167 << " Data Type: " << input->info()->data_type()
1168 << " Shape: " << input->info()->tensor_shape()
1169 << std::endl);
1170
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001171 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgio555d1102018-09-12 13:51:59 +01001172}
1173
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001174/** Create a backend pad layer function
1175 *
1176 * @tparam PadLayerFunction Backend pad function
1177 * @tparam TargetInfo Target-specific information
1178 *
1179 * @param[in] node Node to create the backend function for
1180 *
1181 * @return Backend pad layer function
1182 */
1183template <typename PadLayerFunction, typename TargetInfo>
1184std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
1185{
1186 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1187
1188 // Extract IO and info
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001189 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1190 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1191 const PaddingList &padding = node.padding();
1192 const PixelValue pad_value = node.pad_value();
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001193 ARM_COMPUTE_ERROR_ON(input == nullptr);
1194 ARM_COMPUTE_ERROR_ON(output == nullptr);
1195
1196 // Create and configure function
1197 auto func = support::cpp14::make_unique<PadLayerFunction>();
Georgios Pinitas102b0ce2020-02-13 17:59:09 +00001198 func->configure(input, output, padding, pad_value);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001199
1200 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001201 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1202 << node.name()
1203 << " Type: " << node.type()
1204 << " Target: " << TargetInfo::TargetType
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001205 << " Data Type: " << input->info()->data_type()
1206 << " Input shape: " << input->info()->tensor_shape()
1207 << " Output shape: " << output->info()->tensor_shape()
1208 << std::endl);
1209
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001210 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgio4bb17332018-09-26 13:56:51 +01001211}
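
// Editorial sketch: PaddingList holds one (before, after) pair per tensor
// dimension, innermost first. Padding the spatial W and H dimensions of an
// NCHW tensor by one element on each side could be expressed as follows
// (a hypothetical example, not taken from a real graph):
//
//   const PaddingList padding = { { 1, 1 } /* W */, { 1, 1 } /* H */, { 0, 0 } /* C */, { 0, 0 } /* N */ };
//   const PixelValue  pad_value;  // default-constructed, i.e. zero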
1212
Georgios Pinitas57c48242018-08-02 13:41:49 +01001213/** Create a backend permute layer function
1214 *
1215 * @tparam PermuteLayerFunction Backend permute function
1216 * @tparam TargetInfo Target-specific information
1217 *
1218 * @param[in] node Node to create the backend function for
1219 *
1220 * @return Backend permute layer function
1221 */
1222template <typename PermuteLayerFunction, typename TargetInfo>
1223std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
1224{
1225 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1226
1227 // Extract IO and info
1228 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1229 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1230 const PermutationVector &perm = node.permutation_vector();
1231 ARM_COMPUTE_ERROR_ON(input == nullptr);
1232 ARM_COMPUTE_ERROR_ON(output == nullptr);
1233
1234 // Create and configure function
1235 auto func = support::cpp14::make_unique<PermuteLayerFunction>();
1236 func->configure(input, output, perm);
1237
1238 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001239 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1240 << node.name()
1241 << " Type: " << node.type()
1242 << " Target: " << TargetInfo::TargetType
Georgios Pinitas57c48242018-08-02 13:41:49 +01001243 << " Data Type: " << input->info()->data_type()
1244 << " Input shape: " << input->info()->tensor_shape()
1245 << " Output shape: " << output->info()->tensor_shape()
1246 << " Permutation vector: " << perm
1247 << std::endl);
1248
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001249 return RETURN_UNIQUE_PTR(func);
Georgios Pinitas57c48242018-08-02 13:41:49 +01001250}
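
// Editorial sketch: the permutation vector describes how the source dimensions
// are rearranged. The vector commonly used in this library for an NCHW-to-NHWC
// layout change is shown below (illustrative, not a line from this file):
//
//   const PermutationVector perm(2U, 0U, 1U); // NCHW -> NHWC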
1251
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001252/** Create a backend pooling layer function
1253 *
1254 * @tparam PoolingLayerFunction Backend pooling function
1255 * @tparam TargetInfo Target-specific information
1256 *
1257 * @param[in] node Node to create the backend function for
1258 *
1259 * @return Backend pooling layer function
1260 */
1261template <typename PoolingLayerFunction, typename TargetInfo>
1262std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
1263{
1264 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1265
1266 // Extract IO and info
1267 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1268 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1269 const PoolingLayerInfo pool_info = node.pooling_info();
1270 ARM_COMPUTE_ERROR_ON(input == nullptr);
1271 ARM_COMPUTE_ERROR_ON(output == nullptr);
1272
1273 // Create and configure function
1274 auto func = support::cpp14::make_unique<PoolingLayerFunction>();
1275 func->configure(input, output, pool_info);
1276
1277 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001278 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1279 << node.name()
1280 << " Type: " << node.type()
1281 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001282 << " Data Type: " << input->info()->data_type()
1283 << " Input shape: " << input->info()->tensor_shape()
1284 << " Output shape: " << output->info()->tensor_shape()
Sang-Hoon Park0cb3da62020-01-15 12:39:56 +00001285 << " Pooling info: " << pool_info.pool_type
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001286 << std::endl);
1287
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001288 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001289}
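
// Editorial sketch: a typical PoolingLayerInfo for a 3x3 max pool with stride 2
// and 1-element padding might look as follows; the exact constructor argument
// list is an assumption based on this library version and is shown only to
// illustrate what the node carries:
//
//   PoolingLayerInfo pool_info(PoolingType::MAX, 3 /* pool size */, DataLayout::NHWC,
//                              PadStrideInfo(2, 2, 1, 1) /* stride x/y, pad x/y */);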
1290
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001291/** Create a backend PRelu layer function
1292 *
1293 * @tparam PReluFunction Backend PRelu function
1294 * @tparam TargetInfo Target-specific information
1295 *
1296 * @param[in] node Node to create the backend function for
1297 *
1298 * @return Backend PRelu layer function
1299 */
1300template <typename PReluFunction, typename TargetInfo>
1301std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
1302{
1303 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1304
1305 // Extract IO and info
1306 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1307 typename TargetInfo::TensorType *alpha = get_backing_tensor<TargetInfo>(node.input(1));
1308 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1309 ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
1310 ARM_COMPUTE_ERROR_ON(output == nullptr);
1311
1312 // Create and configure function
1313 auto func = support::cpp14::make_unique<PReluFunction>();
1314 func->configure(input, alpha, output);
1315
1316 // Log info
1317 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1318 << node.name()
1319 << " Type: " << node.type()
1320 << " Target: " << TargetInfo::TargetType
1321 << " Data Type: " << input->info()->data_type()
1322 << " Input shape: " << input->info()->tensor_shape()
1323 << " Output shape: " << output->info()->tensor_shape()
1324 << std::endl);
1325
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001326 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasf8c47492020-02-04 17:39:59 +00001327}
1328
Giorgio Arena6e9d0e02020-01-03 15:02:04 +00001329/** Create a backend print layer function
1330 *
1331 * @tparam TargetInfo Target-specific information
1332 *
1333 * @param[in] node Node to create the backend function for
1334 *
1335 * @return Backend print layer function
1336 */
1337template <typename TargetInfo>
1338std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
1339{
1340 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1341
1342 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1343 ARM_COMPUTE_ERROR_ON(input == nullptr);
1344 ARM_COMPUTE_UNUSED(input);
1345
1346 // Log info
1347 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1348 << node.name()
1349 << " Type: " << node.type()
1350 << " Target: " << TargetInfo::TargetType
1351 << " Data Type: " << input->info()->data_type()
1352 << " Input shape: " << input->info()->tensor_shape()
1353 << std::endl);
1354
1355 return nullptr;
1356}
1357
Pablo Tello32521432018-11-15 14:43:10 +00001358/** Create a backend priorbox layer function
1359 *
1360 * @tparam PriorBoxLayerFunction Backend priorbox function
1361 * @tparam TargetInfo Target-specific information
1362 *
1363 * @param[in] node Node to create the backend function for
1364 *
1365 * @return Backend priorbox layer function
1366 */
1367template <typename PriorBoxLayerFunction, typename TargetInfo>
1368std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
1369{
1370 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1371
1372 // Extract IO and info
1373 typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
1374 typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
1375 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1376 const PriorBoxLayerInfo prior_info = node.priorbox_info();
1377 ARM_COMPUTE_ERROR_ON(input0 == nullptr);
1378 ARM_COMPUTE_ERROR_ON(input1 == nullptr);
1379 ARM_COMPUTE_ERROR_ON(output == nullptr);
1380
1381 // Create and configure function
1382 auto func = support::cpp14::make_unique<PriorBoxLayerFunction>();
1383 func->configure(input0, input1, output, prior_info);
1384
1385 // Log info
1386 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1387 << node.name()
1388 << " Type: " << node.type()
1389 << " Target: " << TargetInfo::TargetType
1390 << " Data Type: " << input0->info()->data_type()
1391 << " Input0 shape: " << input0->info()->tensor_shape()
1392 << " Input1 shape: " << input1->info()->tensor_shape()
1393 << " Output shape: " << output->info()->tensor_shape()
1394 << " PriorBoxLayer info: " << prior_info
1395 << std::endl);
1396
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001397 return RETURN_UNIQUE_PTR(func);
Pablo Tello32521432018-11-15 14:43:10 +00001398}
1399
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001400/** Create a backend quantization layer function
1401 *
1402 * @tparam QuantizationLayerFunction Backend quantization function
1403 * @tparam TargetInfo Target-specific information
1404 *
1405 * @param[in] node Node to create the backend function for
1406 *
1407 * @return Backend quantization layer function
1408 */
1409template <typename QuantizationLayerFunction, typename TargetInfo>
1410std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
1411{
1412 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1413
1414 // Extract IO and info
1415 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1416 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1417 ARM_COMPUTE_ERROR_ON(input == nullptr);
1418 ARM_COMPUTE_ERROR_ON(output == nullptr);
1419
1420 // Create and configure function
1421 auto func = support::cpp14::make_unique<QuantizationLayerFunction>();
1422 func->configure(input, output);
1423
1424 // Log info
1425 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1426 << node.name()
1427 << " Type: " << node.type()
1428 << " Target: " << TargetInfo::TargetType
1429 << " Data Type: " << input->info()->data_type()
1430 << " Input shape: " << input->info()->tensor_shape()
1431 << " Output shape: " << output->info()->tensor_shape()
1432 << std::endl);
1433
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001434 return RETURN_UNIQUE_PTR(func);
Isabella Gottardi3db1ba92019-05-17 12:35:20 +01001435}
1436
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001437/** Create a backend reorg layer function
1438 *
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001439 * @tparam ReorgLayerFunction Backend reorg function
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001440 * @tparam TargetInfo Target-specific information
1441 *
1442 * @param[in] node Node to create the backend function for
1443 *
1444 * @return Backend reorg layer function
1445 */
1446template <typename ReorgLayerFunction, typename TargetInfo>
1447std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
1448{
1449 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1450
1451 // Extract IO and info
1452 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1453 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1454 ARM_COMPUTE_ERROR_ON(input == nullptr);
1455 ARM_COMPUTE_ERROR_ON(output == nullptr);
1456
1457 // Create and configure function
1458 auto func = support::cpp14::make_unique<ReorgLayerFunction>();
1459 func->configure(input, output, node.stride());
1460
1461 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001462 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1463 << node.name()
1464 << " Type: " << node.type()
1465 << " Target: " << TargetInfo::TargetType
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001466 << " Data Type: " << input->info()->data_type()
1467 << " Input shape: " << input->info()->tensor_shape()
1468 << " Output shape: " << output->info()->tensor_shape()
1469 << std::endl);
1470
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001471 return RETURN_UNIQUE_PTR(func);
Gian Marco Iodice23e24792018-09-07 15:32:14 +01001472}
1473
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001474/** Create a backend reshape layer function
1475 *
1476 * @tparam ReshapeLayerFunction Backend reshape function
1477 * @tparam TargetInfo Target-specific information
1478 *
1479 * @param[in] node Node to create the backend function for
1480 *
1481 * @return Backend reshape layer function
1482 */
1483template <typename ReshapeLayerFunction, typename TargetInfo>
1484std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
1485{
1486 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1487
1488 // Extract IO and info
1489 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1490 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1491 ARM_COMPUTE_ERROR_ON(input == nullptr);
1492 ARM_COMPUTE_ERROR_ON(output == nullptr);
1493
1494 // Create and configure function
1495 auto func = support::cpp14::make_unique<ReshapeLayerFunction>();
1496 func->configure(input, output);
1497
1498 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001499 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1500 << node.name()
1501 << " Type: " << node.type()
1502 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001503 << " Data Type: " << input->info()->data_type()
1504 << " Input shape: " << input->info()->tensor_shape()
1505 << " Output shape: " << output->info()->tensor_shape()
1506 << std::endl);
1507
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001508 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001509}
1510
1511/** Create a backend resize layer function
1512 *
1513 * @tparam ResizeLayerFunction Backend resize function
1514 * @tparam TargetInfo Target-specific information
1515 *
1516 * @param[in] node Node to create the backend function for
1517 *
1518 * @return Backend resize layer function
1519 */
1520template <typename ResizeLayerFunction, typename TargetInfo>
1521std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
1522{
1523 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1524
1525 // Extract IO and info
1526 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1527 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1528 ARM_COMPUTE_ERROR_ON(input == nullptr);
1529 ARM_COMPUTE_ERROR_ON(output == nullptr);
1530 const InterpolationPolicy policy = node.policy();
1531
1532 // Create and configure function
1533 auto func = support::cpp14::make_unique<ResizeLayerFunction>();
Sang-Hoon Parkccd94962020-06-09 12:09:24 +01001534 func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT });
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001535
1536 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001537 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1538 << node.name()
1539 << " Type: " << node.type()
1540 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001541 << " Data Type: " << input->info()->data_type()
1542 << " Input shape: " << input->info()->tensor_shape()
1543 << " Output shape: " << output->info()->tensor_shape()
1544 << " Interpolation: " << policy
1545 << std::endl);
1546
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001547 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001548}
1549
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001550/** Create a backend ROI align layer function
1551 *
1552 * @tparam ROIAlignLayerFunction ROI Align function
1553 * @tparam TargetInfo Target-specific information
1554 *
1555 * @param[in] node Node to create the backend function for
1556 *
1557 * @return ROI Align layer function
1558 */
1559template <typename ROIAlignLayerFunction, typename TargetInfo>
1560std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
1561{
1562 validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1563
1564 // Extract IO and info
1565 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1566 typename TargetInfo::TensorType *rois = get_backing_tensor<TargetInfo>(node.input(1));
1567 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1568 ARM_COMPUTE_ERROR_ON(input == nullptr);
1569 ARM_COMPUTE_ERROR_ON(output == nullptr);
1570 ARM_COMPUTE_ERROR_ON(rois == nullptr);
1571
1572 const ROIPoolingLayerInfo pool_info = node.pooling_info();
1573
1574 // Create and configure function
1575 auto func = support::cpp14::make_unique<ROIAlignLayerFunction>();
1576
1577 func->configure(input, rois, output, pool_info);
1578
1579 // Log info
Isabella Gottardi0ae5de92019-03-14 10:32:11 +00001580 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1581 << node.name()
1582 << " Type: " << node.type()
1583 << " Target: " << TargetInfo::TargetType
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001584 << " Data Type: " << input->info()->data_type()
1585 << " Input shape: " << input->info()->tensor_shape()
1586 << " Output shape: " << output->info()->tensor_shape()
1587 << " ROIs shape: " << rois->info()->tensor_shape()
1588 << " ROIPooling width: " << pool_info.pooled_width()
1589 << " ROIPooling height: " << pool_info.pooled_height()
1590 << std::endl);
1591
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001592 return RETURN_UNIQUE_PTR(func);
Manuel Bottini3f9d4d72018-10-19 14:04:42 +01001593}
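
// Editorial sketch: ROIPoolingLayerInfo bundles the pooled output size, the
// spatial scale between feature map and original image, and the sampling
// ratio. The values below are illustrative (e.g. a 1/16 feature stride):
//
//   const ROIPoolingLayerInfo pool_info(7 /* pooled width */, 7 /* pooled height */,
//                                       0.0625f /* spatial scale */, 2 /* sampling ratio */);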
1594
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001595/** Create a backend slice layer function
1596 *
1597 * @tparam SliceLayerFunction Backend slice function
1598 * @tparam TargetInfo Target-specific information
1599 *
1600 * @param[in] node Node to create the backend function for
1601 *
1602 * @return Backend slice layer function
1603 */
1604template <typename SliceLayerFunction, typename TargetInfo>
1605std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
1606{
1607 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1608
1609 // Extract IO and info
1610 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1611 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1612 ARM_COMPUTE_ERROR_ON(input == nullptr);
1613 ARM_COMPUTE_ERROR_ON(output == nullptr);
1614
1615 // Create and configure function
1616 auto func = support::cpp14::make_unique<SliceLayerFunction>();
1617 func->configure(input, output, node.starts(), node.ends());
1618
1619 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001620 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1621 << node.name()
1622 << " Type: " << node.type()
1623 << " Target: " << TargetInfo::TargetType
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001624 << " Data Type: " << input->info()->data_type()
1625 << " Input shape: " << input->info()->tensor_shape()
1626 << " Output shape: " << output->info()->tensor_shape()
1627 << std::endl);
1628
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001629 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgioc30b6682018-09-12 17:44:08 +01001630}
1631
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001632/** Create a backend softmax layer function
1633 *
1634 * @tparam SoftmaxLayerFunction Backend softmax function
1635 * @tparam TargetInfo Target-specific information
1636 *
1637 * @param[in] node Node to create the backend function for
1638 * @param[in] ctx Graph context
1639 *
1640 * @return Backend softmax layer function
1641 */
1642template <typename SoftmaxLayerFunction, typename TargetInfo>
1643std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
1644{
1645 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1646
1647 // Extract IO and info
1648 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1649 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1650 const float beta = node.beta();
1651 ARM_COMPUTE_ERROR_ON(input == nullptr);
1652 ARM_COMPUTE_ERROR_ON(output == nullptr);
1653
1654 // Create and configure function
1655 auto func = support::cpp14::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
1656 func->configure(input, output, beta);
1657
1658 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001659 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1660 << node.name()
1661 << " Type: " << node.type()
1662 << " Target: " << TargetInfo::TargetType
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001663 << " Data Type: " << input->info()->data_type()
1664 << " Input shape: " << input->info()->tensor_shape()
1665 << " Output shape: " << output->info()->tensor_shape()
1666 << std::endl);
1667
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001668 return RETURN_UNIQUE_PTR(func);
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001669}
Michele Di Giorgioec699752019-03-22 15:25:32 +00001670
1671/** Create a backend stack layer function
1672 *
1673 * @tparam StackLayerFunction Backend stack function
1674 * @tparam TargetInfo Target-specific information
1675 *
1676 * @param[in] node Node to create the backend function for
1677 *
1678 * @return Backend stack layer function
1679 */
1680template <typename StackLayerFunction, typename TargetInfo>
1681std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
1682{
1683 ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
1684 ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
1685
1686 // Extract IO and info
1687 std::vector<typename TargetInfo::TensorType *> inputs;
1688 for(unsigned int i = 0; i < node.num_inputs(); ++i)
1689 {
1690 inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
1691 }
1692 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1693 const int axis = node.axis();
1694
1695 // Create and configure function
1696 auto func = support::cpp14::make_unique<StackLayerFunction>();
1697 func->configure(inputs, axis, output);
1698
1699 // Log info
1700 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1701 << node.name()
1702 << " Type: " << node.type()
1703 << " Target: " << TargetInfo::TargetType
1704 << " Data Type: " << output->info()->data_type()
1705 << " Inputs shape: " << inputs[0]->info()->tensor_shape()
1706 << " Output shape: " << output->info()->tensor_shape()
1707 << " Num Inputs: " << inputs.size()
1708 << " Axis: " << axis
1709 << std::endl);
1710
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001711 return RETURN_UNIQUE_PTR(func);
Michele Di Giorgioec699752019-03-22 15:25:32 +00001712}
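
// Editorial sketch: stacking inserts a new dimension of size num_inputs at the
// requested axis. A minimal OpenCL-runtime example, assuming four 160x120
// input tensors (class and method names follow the CL backend; treat them as
// assumptions here):
//
//   std::vector<ICLTensor *> inputs = { &t0, &t1, &t2, &t3 };
//   CLStackLayer stack;
//   stack.configure(inputs, 2 /* axis */, &output); // output shape becomes 160x120x4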
thecha012bfadd92020-08-12 17:25:51 +01001713
1714/** Create a backend strided slice layer function
1715 *
1716 * @tparam StridedSliceLayerFunction Backend strided slice function
1717 * @tparam TargetInfo Target-specific information
1718 *
1719 * @param[in] node Node to create the backend function for
1720 *
1721 * @return Backend strided slice layer function
1722 */
1723template <typename StridedSliceLayerFunction, typename TargetInfo>
1724std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
1725{
1726 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1727
1728 // Extract IO and info
1729 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1730 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1731 Coordinates starts = node.starts();
1732 Coordinates ends = node.ends();
1733 BiStrides strides = node.strides();
1734 StridedSliceLayerInfo info = node.strided_slice_info();
1735
1736 ARM_COMPUTE_ERROR_ON(input == nullptr);
1737 ARM_COMPUTE_ERROR_ON(output == nullptr);
1738
1739 // Create and configure function
1740 auto func = support::cpp14::make_unique<StridedSliceLayerFunction>();
1741 func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());
1742
1743 // Log info
1744 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1745 << node.name()
1746 << " Type: " << node.type()
1747 << " Target: " << TargetInfo::TargetType
1748 << " Data Type: " << input->info()->data_type()
1749 << " Input shape: " << input->info()->tensor_shape()
1750 << " Output shape: " << output->info()->tensor_shape()
1751 << std::endl);
1752
1753 return RETURN_UNIQUE_PTR(func);
1754}
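
// Editorial sketch: starts/ends/strides are per-dimension coordinates, and the
// begin/end masks flag dimensions whose start/end values should be ignored
// (i.e. take the full extent). A hypothetical slice of channels 1..3 of a
// W x H x C tensor could be described as:
//
//   Coordinates           starts(0, 0, 1);
//   Coordinates           ends(0, 0, 3);
//   BiStrides             strides(1, 1, 1);
//   StridedSliceLayerInfo info(1 | 2 /* begin_mask */, 1 | 2 /* end_mask */, 0 /* shrink_axis_mask */);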
1755
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001756/** Create a backend Upsample layer function
1757 *
1758 * @tparam UpsampleLayerFunction Backend Upsample function
1759 * @tparam TargetInfo Target-specific information
1760 *
1761 * @param[in] node Node to create the backend function for
1762 * @param[in] ctx Graph context
1763 *
1764 * @return Backend Upsample layer function
1765 */
1766template <typename UpsampleLayerFunction, typename TargetInfo>
1767std::unique_ptr<IFunction> create_upsample_layer(UpsampleLayerNode &node, GraphContext &ctx)
1768{
Michalis Spyrou6bff1952019-10-02 17:22:11 +01001769 ARM_COMPUTE_UNUSED(ctx);
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001770 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1771
1772 // Extract IO and info
1773 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1774 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1775 const Size2D info = node.info();
1776 const InterpolationPolicy upsampling_policy = node.upsampling_policy();
1777 ARM_COMPUTE_ERROR_ON(upsampling_policy != InterpolationPolicy::NEAREST_NEIGHBOR);
1778 ARM_COMPUTE_ERROR_ON(info.x() != 2 || info.y() != 2);
1779 ARM_COMPUTE_ERROR_ON(input == nullptr);
1780 ARM_COMPUTE_ERROR_ON(output == nullptr);
1781
1782 // Create and configure function
1783 auto func = support::cpp14::make_unique<UpsampleLayerFunction>();
1784 func->configure(input, output, info, upsampling_policy);
1785
1786 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001787 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1788 << node.name()
1789 << " Type: " << node.type()
1790 << " Target: " << TargetInfo::TargetType
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001791 << " Data Type: " << input->info()->data_type()
1792 << " Input shape: " << input->info()->tensor_shape()
1793 << " Output shape: " << output->info()->tensor_shape()
1794 << " Strides: " << info
1795 << " Upsampling policy: " << upsampling_policy
1796 << std::endl);
1797
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001798 return RETURN_UNIQUE_PTR(func);
Michalis Spyrou4e1c3f32018-09-20 17:14:03 +01001799}
Michalis Spyrou96f67692018-09-13 11:39:28 +01001800/** Create a backend YOLO layer function
1801 *
1802 * @tparam YOLOlayerFunction Backend YOLO function
1803 * @tparam TargetInfo Target-specific information
1804 *
1805 * @param[in] node Node to create the backend function for
1806 * @param[in] ctx Graph context
1807 *
1808 * @return Backend YOLO layer function
1809 */
1810template <typename YOLOlayerFunction, typename TargetInfo>
1811std::unique_ptr<IFunction> create_yolo_layer(YOLOLayerNode &node, GraphContext &ctx)
1812{
Michalis Spyrou6bff1952019-10-02 17:22:11 +01001813 ARM_COMPUTE_UNUSED(ctx);
Michalis Spyrou96f67692018-09-13 11:39:28 +01001814 validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1815
1816 // Extract IO and info
1817 typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1818 typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1819 const ActivationLayerInfo act_info = node.activation_info();
1820 const int32_t num_classes = node.num_classes();
1821 ARM_COMPUTE_ERROR_ON(num_classes <= 0);
1822 ARM_COMPUTE_ERROR_ON(input == nullptr);
1823 ARM_COMPUTE_ERROR_ON(output == nullptr);
1824
1825 // Create and configure function
1826 auto func = support::cpp14::make_unique<YOLOlayerFunction>();
1827 func->configure(input, output, act_info, num_classes);
1828
1829 // Log info
Pablo Tello32521432018-11-15 14:43:10 +00001830 ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1831 << node.name()
1832 << " Type: " << node.type()
1833 << " Target: " << TargetInfo::TargetType
Michalis Spyrou96f67692018-09-13 11:39:28 +01001834 << " Data Type: " << input->info()->data_type()
1835 << " Input shape: " << input->info()->tensor_shape()
1836 << " Output shape: " << output->info()->tensor_shape()
1837 << " Activation function: " << act_info.activation()
1838 << " Num classes: " << num_classes
1839 << std::endl);
1840
Georgios Pinitas0b192e82020-02-20 17:09:28 +00001841 return RETURN_UNIQUE_PTR(func);
Michalis Spyrou96f67692018-09-13 11:39:28 +01001842}
Georgios Pinitasda2491f2018-06-01 17:49:09 +01001843} // namespace detail
1844} // namespace backends
1845} // namespace graph
1846} // namespace arm_compute
1847
Michalis Spyrouf4643372019-11-29 16:17:13 +00001848#endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */