/*
 * Copyright (c) 2017-2021, 2023 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE
#define ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE

#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"
#include "tests/AssetsLibrary.h"
#include "tests/Globals.h"
#include "tests/IAccessor.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Fixture.h"
#include "tests/framework/ParametersLibrary.h"
#include "tests/validation/Helpers.h"
#include "tests/validation/reference/ActivationLayer.h"

#include <random>

namespace arm_compute
{
namespace test
{
namespace validation
{
/** Generic validation fixture for activation layers.
 *
 * Runs the activation function both through the backend under test
 * (@p FunctionType on @p TensorType, read back via @p AccessorType) and
 * through the scalar reference implementation, storing the results in
 * _target and _reference for the test case to compare.
 *
 * @tparam TensorType   Backend tensor type (e.g. CLTensor, Tensor).
 * @tparam AccessorType Accessor used to fill the backend tensor.
 * @tparam FunctionType Activation layer function under test.
 * @tparam T            Element data type of the tensors.
 */
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class ActivationValidationGenericFixture : public framework::Fixture
{
public:
    // _target must be constructed with the context from the global
    // parameters library so it is bound to the right backend/device.
    ActivationValidationGenericFixture()
        : _target(parameters->get_ctx<TensorType>())
    {
    }

    /** Configure and run one test case.
     *
     * @param shape             Shape of the input (and output) tensor.
     * @param in_place          If true, src is also used as dst (dst pointer is nullptr).
     * @param function          Activation function to apply.
     * @param alpha_beta        Value used for both alpha and beta of the activation info.
     * @param data_type         Element data type.
     * @param quantization_info Input quantization info for quantized data types.
     */
    void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
    {
        ActivationLayerInfo info(function, alpha_beta, alpha_beta);

        _in_place                 = in_place;
        _data_type                = data_type;
        // Some activations (TANH/LOGISTIC) force a fixed output quantization; for
        // in-place runs the input must use the output's quantization as well.
        _output_quantization_info = calculate_output_quantization_info(_data_type, info, quantization_info);
        _input_quantization_info  = in_place ? _output_quantization_info : quantization_info;

        _function  = function;
        _target    = compute_target(shape, info);
        _reference = compute_reference(shape, info);
    }

protected:
    /** Build the set of input values used to fill the tensors.
     *
     * This function will return a vector filled with the following values that can
     * represent two partitions derived from equivalent partitioning.
     * * Lower partition: min, min + delta, lower quarter (nominal), center - delta
     * * Upper partition: center, center + delta, upper quarter (nominal), max - delta, max
     *
     * @param min Smallest allowed input value.
     * @param max Largest allowed input value.
     *
     * @return Boundary values clamped to [min, max].
     */
    std::vector<T> get_boundary_values(T min, T max)
    {
        const auto delta         = is_data_type_float(_data_type) ? T(0.1f) : T(1);
        const auto center_value  = (min + max) / 2;
        const auto lower_quarter = (min + center_value) / 2;
        const auto upper_quarter = (center_value + max) / 2;

        std::vector<T> boundary_values{};

        // To ensure all the inserted values are within the given range after subtracting/adding delta
        auto insert_values = [&boundary_values, &min, &max](const std::initializer_list<T> &new_values)
        {
            for(auto &v : new_values)
            {
                if(v >= min && v <= max)
                {
                    boundary_values.emplace_back(v);
                }
            }
        };

        insert_values({ min, static_cast<T>(min + delta), static_cast<T>(lower_quarter), static_cast<T>(center_value - delta) });                             // lower partition
        insert_values({ static_cast<T>(center_value), static_cast<T>(center_value + delta), static_cast<T>(upper_quarter), static_cast<T>(max - delta), max }); // upper partition

        return boundary_values;
    }

    /** Fill @p tensor with boundary values appropriate for the data type.
     *
     * Float types use per-activation test bounds; all other (quantized/integer)
     * types use the full representable range of the tensor's data type.
     */
    template <typename U>
    void fill(U &&tensor)
    {
        if(is_data_type_float(_data_type))
        {
            float min_bound = 0;
            float max_bound = 0;
            std::tie(min_bound, max_bound) = get_activation_layer_test_bounds<T>(_function, _data_type);
            library->fill_static_values(tensor, get_boundary_values(static_cast<T>(min_bound), static_cast<T>(max_bound)));
        }
        else
        {
            PixelValue min{};
            PixelValue max{};
            std::tie(min, max) = get_min_max(tensor.data_type());
            library->fill_static_values(tensor, get_boundary_values(min.get<T>(), max.get<T>()));
        }
    }

    /** Run the activation on the backend under test and return the result tensor.
     *
     * When _in_place is true the function is configured with a null destination
     * and src is returned; otherwise dst is allocated and returned.
     */
    TensorType compute_target(const TensorShape &shape, ActivationLayerInfo info)
    {
        auto ctx = parameters->get_ctx<TensorType>();
        // Create tensors
        TensorType src = create_tensor<TensorType>(shape, _data_type, 1, _input_quantization_info, DataLayout::NCHW, ctx);
        TensorType dst = create_tensor<TensorType>(shape, _data_type, 1, _output_quantization_info, DataLayout::NCHW, ctx);

        // Create and configure function
        FunctionType act_layer(ctx);

        TensorType *dst_ptr = _in_place ? nullptr : &dst;

        act_layer.configure(&src, dst_ptr, info);

        // configure() must not allocate the tensors
        ARM_COMPUTE_ASSERT(src.info()->is_resizable());
        ARM_COMPUTE_ASSERT(dst.info()->is_resizable());

        // Allocate tensors
        src.allocator()->allocate();
        ARM_COMPUTE_ASSERT(!src.info()->is_resizable());

        if(!_in_place)
        {
            dst.allocator()->allocate();
            ARM_COMPUTE_ASSERT(!dst.info()->is_resizable());
        }

        // Fill tensors
        fill(AccessorType(src));

        // Compute function
        act_layer.run();

        if(_in_place)
        {
            return src;
        }
        else
        {
            return dst;
        }
    }

    /** Compute the expected output with the scalar reference implementation. */
    SimpleTensor<T> compute_reference(const TensorShape &shape, ActivationLayerInfo info)
    {
        // Create reference
        SimpleTensor<T> src{ shape, _data_type, 1, _input_quantization_info };

        // Fill reference
        fill(src);

        return reference::activation_layer<T>(src, info, _output_quantization_info);
    }

private:
    /** Select the output quantization info for quantized activations.
     *
     * TANH and LOGISTIC have fixed, data-type-dependent output ranges, so their
     * output quantization is pinned here; every other case keeps @p default_qinfo.
     */
    QuantizationInfo calculate_output_quantization_info(DataType dt, const ActivationLayerInfo &act_info, const QuantizationInfo &default_qinfo)
    {
        // One past the max representable value of each quantized type (256 / 128 / 32768)
        auto qasymm8_max        = float(std::numeric_limits<uint8_t>::max()) + 1.f;
        auto qasymm8_signed_max = float(std::numeric_limits<int8_t>::max()) + 1.f;
        auto qsymm16_max        = float(std::numeric_limits<int16_t>::max()) + 1.f;

        switch(act_info.activation())
        {
            case ActivationLayerInfo::ActivationFunction::TANH:
                // tanh output range is [-1, 1]
                if(dt == DataType::QSYMM16)
                {
                    return QuantizationInfo(1.f / qsymm16_max, 0);
                }
                else if(dt == DataType::QASYMM8)
                {
                    return QuantizationInfo(1.f / (0.5 * qasymm8_max), int(0.5 * qasymm8_max));
                }
                else if(dt == DataType::QASYMM8_SIGNED)
                {
                    return QuantizationInfo(1.f / qasymm8_signed_max, 0);
                }
                else
                {
                    return default_qinfo;
                }
            case ActivationLayerInfo::ActivationFunction::LOGISTIC:
                // logistic output range is [0, 1]
                if(dt == DataType::QSYMM16)
                {
                    return QuantizationInfo(1.f / qsymm16_max, 0);
                }
                else if(dt == DataType::QASYMM8)
                {
                    return QuantizationInfo(1.f / qasymm8_max, 0);
                }
                else if(dt == DataType::QASYMM8_SIGNED)
                {
                    return QuantizationInfo(1.f / (2.f * qasymm8_signed_max), -int(qasymm8_signed_max));
                }
                else
                {
                    return default_qinfo;
                }
            default:
                return default_qinfo;
        }
    }

protected:
    TensorType       _target{};                   // Output produced by the backend under test
    SimpleTensor<T>  _reference{};                // Expected output from the reference implementation
    bool             _in_place{};                 // True when src doubles as dst
    QuantizationInfo _input_quantization_info{};  // Quantization applied to the input tensor
    QuantizationInfo _output_quantization_info{}; // Quantization applied to the output tensor
    DataType         _data_type{};
    ActivationLayerInfo::ActivationFunction _function{};
};
232template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000233class ActivationValidationFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100234{
235public:
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100236 void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type)
237 {
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +0100238 ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, QuantizationInfo());
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000239 }
240};
242template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
243class ActivationValidationQuantizedFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
244{
245public:
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000246 void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
247 {
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +0100248 ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, quantization_info);
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000249 }
250};
} // namespace validation
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE */