/*
 * Copyright (c) 2016-2023 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ACL_ARM_COMPUTE_FUNCTION_INFO_ACTIVATIONLAYERINFO
#define ACL_ARM_COMPUTE_FUNCTION_INFO_ACTIVATIONLAYERINFO

#include "arm_compute/core/CoreTypes.h"
#include "arm_compute/core/QuantizationInfo.h"

#include <array>
#include <utility> // for std::move used by setLookupTable256()

namespace arm_compute
{
/** Available activation functions */
enum class ActivationFunction
{
    LOGISTIC,        /**< Logistic ( \f$ f(x) = \frac{1}{1 + e^{-x}} \f$ ) */
    TANH,            /**< Hyperbolic tangent ( \f$ f(x) = a \cdot tanh(b \cdot x) \f$ ) */
    RELU,            /**< Rectifier ( \f$ f(x) = max(0,x) \f$ ) */
    BOUNDED_RELU,    /**< Upper Bounded Rectifier ( \f$ f(x) = min(a, max(0,x)) \f$ ) */
    LU_BOUNDED_RELU, /**< Lower and Upper Bounded Rectifier ( \f$ f(x) = min(a, max(b,x)) \f$ ) */
    LEAKY_RELU,      /**< Leaky Rectifier ( \f$ f(x) = \begin{cases} \alpha x & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
    SOFT_RELU,       /**< Soft Rectifier ( \f$ f(x)= log(1+e^x) \f$ ) */
    ELU,             /**< Exponential Linear Unit ( \f$ f(x) = \begin{cases} \alpha (exp(x) - 1) & \quad \text{if } x \text{ < 0}\\ x & \quad \text{if } x \geq \text{ 0 } \end{cases} \f$ ) */
    ABS,             /**< Absolute ( \f$ f(x)= |x| \f$ ) */
    SQUARE,          /**< Square ( \f$ f(x)= x^2 \f$ ) */
    SQRT,            /**< Square root ( \f$ f(x) = \sqrt{x} \f$ ) */
    LINEAR,          /**< Linear ( \f$ f(x)= ax + b \f$ ) */
    IDENTITY,        /**< Identity ( \f$ f(x)= x \f$ ) */
    HARD_SWISH,      /**< Hard-swish ( \f$ f(x) = (x \text{ReLU6}(x+3))/6 = x \min(\max(0,x+3),6)/6 \f$ ) */
    SWISH,           /**< Swish ( \f$ f(x) = \frac{x}{1 + e^{-ax}} = x \text{logistic}(ax) \f$ ) */
    GELU             /**< GELU ( \f$ f(x) = x * 1/2 * (1 + erf(x / \sqrt{2})) \f$ ) */
};
/** Activation Layer Information class */
class ActivationLayerInfo
{
public:
    typedef arm_compute::ActivationFunction ActivationFunction;

    /** Lookup table */
    using LookupTable256 = std::array<qasymm8_t, 256>;

    ActivationLayerInfo() = default;
    /** Constructor
     *
     * @param[in] f The activation function to use.
     * @param[in] a (Optional) The alpha parameter used by some activation functions
     *              (@ref ActivationFunction::BOUNDED_RELU, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::LINEAR, @ref ActivationFunction::TANH).
     * @param[in] b (Optional) The beta parameter used by some activation functions
     *              (@ref ActivationFunction::LINEAR, @ref ActivationFunction::LU_BOUNDED_RELU, @ref ActivationFunction::TANH).
     */
    ActivationLayerInfo(ActivationFunction f, float a = 0.0f, float b = 0.0f) : _act(f), _a(a), _b(b), _enabled(true)
    {
    }
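    // Usage sketch (illustrative only, not part of the API surface): configuring a
    // ReLU6-style activation via LU_BOUNDED_RELU. The cap of 6.0f and floor of 0.0f
    // are example values chosen here, not defaults of the library.
    //
    //   ActivationLayerInfo act_info(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f);
    //   if (act_info.enabled())
    //   {
    //       float upper_bound = act_info.a(); // 6.0f
    //       float lower_bound = act_info.b(); // 0.0f
    //   }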
    /** Get the type of activation function */
    ActivationFunction activation() const
    {
        return _act;
    }
    /** Get the alpha value */
    float a() const
    {
        return _a;
    }
    /** Get the beta value */
    float b() const
    {
        return _b;
    }
    /** Check if initialised */
    bool enabled() const
    {
        return _enabled;
    }

#ifdef __aarch64__
    /** Get the 256-entry lookup table (AArch64 only) */
    const LookupTable256 &lut() const
    {
        return _lut;
    }
    /** Set the 256-entry lookup table; the table is moved from the given argument (AArch64 only) */
    void setLookupTable256(LookupTable256 &lut)
    {
        _lut = std::move(lut);
    }
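    // Usage sketch (illustrative only): filling a 256-entry table and attaching it to an
    // existing ActivationLayerInfo act_info on AArch64. The identity mapping below is a
    // placeholder; a real table would hold the quantized activation output for each of the
    // 256 possible 8-bit inputs. Note that setLookupTable256() moves from its argument.
    //
    //   ActivationLayerInfo::LookupTable256 lut{};
    //   for (size_t i = 0; i < lut.size(); ++i)
    //   {
    //       lut[i] = static_cast<qasymm8_t>(i); // placeholder: identity mapping
    //   }
    //   act_info.setLookupTable256(lut);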
#endif // __aarch64__
private:
    ActivationFunction _act     = {ActivationLayerInfo::ActivationFunction::IDENTITY};
    float              _a       = {};
    float              _b       = {};
    bool               _enabled = {false};

#ifdef __aarch64__
    LookupTable256 _lut = {};
#endif // __aarch64__
};
} // namespace arm_compute
#endif /* ACL_ARM_COMPUTE_FUNCTION_INFO_ACTIVATIONLAYERINFO */