/*
 * Copyright (c) 2017-2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_NEACTIVATIONLAYERKERNEL_H__
#define __ARM_COMPUTE_NEACTIVATIONLAYERKERNEL_H__

#include "arm_compute/core/FixedPoint.h"
#include "arm_compute/core/NEON/INEKernel.h"
#include "arm_compute/core/QAsymm8.h"

#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
#include <arm_fp16.h>
#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */

namespace arm_compute
{
class ITensor;

/** Interface for the activation layer kernel. */
class NEActivationLayerKernel : public INEKernel
{
public:
    const char *name() const override
    {
        return "NEActivationLayerKernel";
    }
    /** Constructor */
    NEActivationLayerKernel();
    /** Prevent instances of this class from being copied (As this class contains pointers) */
    NEActivationLayerKernel(const NEActivationLayerKernel &) = delete;
    /** Default move constructor */
    NEActivationLayerKernel(NEActivationLayerKernel &&) = default;
    /** Prevent instances of this class from being copied (As this class contains pointers) */
    NEActivationLayerKernel &operator=(const NEActivationLayerKernel &) = delete;
    /** Default move assignment operator */
    NEActivationLayerKernel &operator=(NEActivationLayerKernel &&) = default;
    /** Set the input and output tensor.
     *
     * @note If the output tensor is a nullptr, the activation function will be performed in-place
     *
     * @param[in, out] input           Source tensor. In case of @p output tensor = nullptr, this tensor will store the result
     *                                 of the activation function. Data types supported: QS8/QASYMM8/QS16/F16/F32.
     * @param[out]     output          Destination tensor. Data type supported: same as @p input
     * @param[in]      activation_info Activation layer information.
     */
    void configure(ITensor *input, ITensor *output, ActivationLayerInfo activation_info);
    /** Static function to check if given info will lead to a valid configuration of @ref NEActivationLayerKernel
     *
     * @param[in] input    Source tensor info. In case of @p output tensor info = nullptr, this tensor will store the result
     *                     of the activation function. Data types supported: QS8/QASYMM8/QS16/F16/F32.
     * @param[in] output   Destination tensor info. Data type supported: same as @p input
     * @param[in] act_info Activation layer information.
     *
     * @return a status
     */
    static Status validate(const ITensorInfo *input, const ITensorInfo *output, const ActivationLayerInfo &act_info);
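    /* Minimal usage sketch (illustrative only): the tensor names below are hypothetical and
     * allocation, validation and error handling are elided. It shows the usual NEON
     * configure-then-schedule flow; in practice the kernel is typically driven through the
     * NEActivationLayer runtime function rather than invoked directly.
     *
     *   Tensor src, dst; // assumed to be initialised with matching shapes and a supported data type
     *   NEActivationLayerKernel kernel;
     *   kernel.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
     *   // Passing nullptr as output would run the activation in-place on src.
     *   NEScheduler::get().schedule(&kernel, Window::DimY);
     */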

    // Inherited methods overridden:
    void run(const Window &window, const ThreadInfo &info) override;

private:
    using ActivationFunction = ActivationLayerInfo::ActivationFunction;
    /** Common signature for all the specialised @ref NEActivationLayerKernel functions
     *
     * @param[in] window Region on which to execute the kernel.
     */
    using ActivationFunctionExecutorPtr = void (NEActivationLayerKernel::*)(const Window &window);
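    /* Note (assumed implementation detail, following the usual pattern for these kernels):
     * configure() is expected to select the activation() specialisation matching the tensor's
     * data type and activation function and store it in _func, so that run() can dispatch with
     * something like (this->*_func)(window). */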
    /** Function to apply an activation function on a tensor.
     *
     * @param[in] window Region on which to execute the kernel
     */
    template <ActivationLayerInfo::ActivationFunction F, typename T>
    typename std::enable_if<std::is_same<T, float>::value, void>::type activation(const Window &window);
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
    /** Function to apply an activation function on a tensor.
     *
     * @param[in] window Region on which to execute the kernel
     */
    template <ActivationLayerInfo::ActivationFunction F, typename T>
    typename std::enable_if<std::is_same<T, float16_t>::value, void>::type activation(const Window &window);
#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
    /** Function to apply an activation function on a tensor.
     *
     * @param[in] window Region on which to execute the kernel
     */
    template <ActivationLayerInfo::ActivationFunction F, typename T>
    typename std::enable_if<std::is_same<T, qint8_t>::value, void>::type activation(const Window &window);
    /** Function to apply an activation function on a tensor.
     *
     * @param[in] window Region on which to execute the kernel
     */
    template <ActivationLayerInfo::ActivationFunction F, typename T>
    typename std::enable_if<std::is_same<T, qasymm8_t>::value, void>::type activation(const Window &window);
    /** Function to apply an activation function on a tensor.
     *
     * @param[in] window Region on which to execute the kernel
     */
    template <ActivationLayerInfo::ActivationFunction F, typename T>
    typename std::enable_if<std::is_same<T, qint16_t>::value, void>::type activation(const Window &window);

private:
    ITensor                      *_input;
    ITensor                      *_output;
    ActivationFunctionExecutorPtr _func;
    ActivationLayerInfo           _act_info;
};
} // namespace arm_compute
#endif /* __ARM_COMPUTE_NEACTIVATIONLAYERKERNEL_H__ */