blob: 531b3abdf992eb9cb512cca26a72f414a101d2e8 [file] [log] [blame]
/*
 * Copyright (c) 2017-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
24#ifndef ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE
25#define ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE
26
27#include "arm_compute/core/TensorShape.h"
28#include "arm_compute/core/Types.h"
Moritz Pflanzer572ade72017-07-21 17:36:33 +010029#include "tests/AssetsLibrary.h"
30#include "tests/Globals.h"
31#include "tests/IAccessor.h"
Moritz Pflanzera09de0c2017-09-01 20:41:12 +010032#include "tests/framework/Asserts.h"
33#include "tests/framework/Fixture.h"
Pablo Tellodb8485a2019-09-24 11:03:47 +010034#include "tests/framework/ParametersLibrary.h"
Moritz Pflanzera09de0c2017-09-01 20:41:12 +010035#include "tests/validation/Helpers.h"
Georgios Pinitas5a7e7762017-12-01 16:27:29 +000036#include "tests/validation/reference/ActivationLayer.h"
Moritz Pflanzer572ade72017-07-21 17:36:33 +010037
38#include <random>
39
40namespace arm_compute
41{
42namespace test
43{
44namespace validation
45{
46template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
Michel Iwaniec66cc12f2017-12-07 17:26:40 +000047class ActivationValidationGenericFixture : public framework::Fixture
Moritz Pflanzer572ade72017-07-21 17:36:33 +010048{
49public:
Pablo Tellodb8485a2019-09-24 11:03:47 +010050 ActivationValidationGenericFixture()
51 : _target(parameters->get_ctx<TensorType>())
52 {
53 }
54
Moritz Pflanzer572ade72017-07-21 17:36:33 +010055 template <typename...>
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +010056 void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
Moritz Pflanzer572ade72017-07-21 17:36:33 +010057 {
Moritz Pflanzer572ade72017-07-21 17:36:33 +010058 ActivationLayerInfo info(function, alpha_beta, alpha_beta);
59
Georgios Pinitas4b3fba12019-06-04 17:31:46 +010060 _in_place = in_place;
Georgios Pinitas4b3fba12019-06-04 17:31:46 +010061 _data_type = data_type;
giuros01c9573f32019-06-20 10:30:17 +010062 _output_quantization_info = calculate_output_quantization_info(_data_type, info, quantization_info);
63 _input_quantization_info = in_place ? _output_quantization_info : quantization_info;
Georgios Pinitas4b3fba12019-06-04 17:31:46 +010064
giuros01c9573f32019-06-20 10:30:17 +010065 _function = function;
Georgios Pinitas4b3fba12019-06-04 17:31:46 +010066 _target = compute_target(shape, info);
67 _reference = compute_reference(shape, info);
Moritz Pflanzer572ade72017-07-21 17:36:33 +010068 }
69
70protected:
Sang-Hoon Parka8a7c1d2020-05-12 22:01:23 +010071 std::vector<T> get_boundary_values(T min, T max)
72 {
73 // This function will return a vector filled with the following values that can
74 // represent two partitions derived from equivalent partitioning.
75 // * Lower parition: min, min + delta, lower quarter (nominal), center - delta
76 // * Upper partition: center, center + delta, upper quarter (nominal), max - delta, max
77 const auto delta = is_data_type_float(_data_type) ? T(0.1f) : T(1);
78 const auto center_value = (min + max) / 2;
79 const auto lower_quarter = (min + center_value) / 2;
80 const auto upper_quarter = (center_value + max) / 2;
81
82 std::vector<T> boundary_values{};
83
84 // To ensure all the inserted values are within the given range after subtracing/adding delta
85 auto insert_values = [&boundary_values, &min, &max](const std::initializer_list<T> &new_values)
86 {
87 for(auto &v : new_values)
88 {
89 if(v >= min && v <= max)
90 {
91 boundary_values.emplace_back(v);
92 }
93 }
94 };
95
96 insert_values({ min, static_cast<T>(min + delta), static_cast<T>(lower_quarter), static_cast<T>(center_value - delta) }); // lower partition
97 insert_values({ static_cast<T>(center_value), static_cast<T>(center_value + delta), static_cast<T>(upper_quarter), static_cast<T>(max - delta), max }); // upper partition
98
99 return boundary_values;
100 }
101
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100102 template <typename U>
103 void fill(U &&tensor)
104 {
105 if(is_data_type_float(_data_type))
106 {
107 float min_bound = 0;
108 float max_bound = 0;
109 std::tie(min_bound, max_bound) = get_activation_layer_test_bounds<T>(_function, _data_type);
Sang-Hoon Parka8a7c1d2020-05-12 22:01:23 +0100110 library->fill_static_values(tensor, get_boundary_values(static_cast<T>(min_bound), static_cast<T>(max_bound)));
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000111 }
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100112 else
113 {
Sang-Hoon Parka8a7c1d2020-05-12 22:01:23 +0100114 PixelValue min{};
115 PixelValue max{};
116 std::tie(min, max) = get_min_max(tensor.data_type());
117 library->fill_static_values(tensor, get_boundary_values(min.get<T>(), max.get<T>()));
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100118 }
119 }
120
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100121 TensorType compute_target(const TensorShape &shape, ActivationLayerInfo info)
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100122 {
Pablo Tellodb8485a2019-09-24 11:03:47 +0100123 auto ctx = parameters->get_ctx<TensorType>();
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100124 // Create tensors
Pablo Tellodb8485a2019-09-24 11:03:47 +0100125 TensorType src = create_tensor<TensorType>(shape, _data_type, 1, _input_quantization_info, DataLayout::NCHW, ctx);
126 TensorType dst = create_tensor<TensorType>(shape, _data_type, 1, _output_quantization_info, DataLayout::NCHW, ctx);
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100127
128 // Create and configure function
Pablo Tellodb8485a2019-09-24 11:03:47 +0100129 FunctionType act_layer(ctx);
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100130
Manuel Bottini30dbeef2019-06-26 16:23:03 +0100131 TensorType *dst_ptr = _in_place ? nullptr : &dst;
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100132
133 act_layer.configure(&src, dst_ptr, info);
134
Michele Di Giorgio4fc10b32021-04-30 18:30:41 +0100135 ARM_COMPUTE_ASSERT(src.info()->is_resizable());
136 ARM_COMPUTE_ASSERT(dst.info()->is_resizable());
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100137
138 // Allocate tensors
139 src.allocator()->allocate();
Michele Di Giorgio4fc10b32021-04-30 18:30:41 +0100140 ARM_COMPUTE_ASSERT(!src.info()->is_resizable());
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100141
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100142 if(!_in_place)
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100143 {
144 dst.allocator()->allocate();
Michele Di Giorgio4fc10b32021-04-30 18:30:41 +0100145 ARM_COMPUTE_ASSERT(!dst.info()->is_resizable());
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100146 }
147
148 // Fill tensors
149 fill(AccessorType(src));
150
151 // Compute function
152 act_layer.run();
153
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100154 if(_in_place)
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100155 {
156 return src;
157 }
158 else
159 {
160 return dst;
161 }
162 }
163
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100164 SimpleTensor<T> compute_reference(const TensorShape &shape, ActivationLayerInfo info)
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100165 {
166 // Create reference
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100167 SimpleTensor<T> src{ shape, _data_type, 1, _input_quantization_info };
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100168
169 // Fill reference
170 fill(src);
171
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100172 return reference::activation_layer<T>(src, info, _output_quantization_info);
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100173 }
174
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100175private:
giuros01c9573f32019-06-20 10:30:17 +0100176 QuantizationInfo calculate_output_quantization_info(DataType dt, const ActivationLayerInfo &act_info, const QuantizationInfo &default_qinfo)
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100177 {
Michalis Spyrou8d4d1b82019-11-28 11:31:23 +0000178 auto qasymm8_max = float(std::numeric_limits<uint8_t>::max()) + 1.f;
179 auto qasymm8_signed_max = float(std::numeric_limits<int8_t>::max()) + 1.f;
180 auto qsymm16_max = float(std::numeric_limits<int16_t>::max()) + 1.f;
giuros01c9573f32019-06-20 10:30:17 +0100181
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100182 switch(act_info.activation())
183 {
184 case ActivationLayerInfo::ActivationFunction::TANH:
giuros01c9573f32019-06-20 10:30:17 +0100185 if(dt == DataType::QSYMM16)
186 {
187 return QuantizationInfo(1.f / qsymm16_max, 0);
188 }
189 else if(dt == DataType::QASYMM8)
190 {
191 return QuantizationInfo(1.f / (0.5 * qasymm8_max), int(0.5 * qasymm8_max));
192 }
Michalis Spyrou8d4d1b82019-11-28 11:31:23 +0000193 else if(dt == DataType::QASYMM8_SIGNED)
194 {
195 return QuantizationInfo(1.f / qasymm8_signed_max, 0);
196 }
giuros01c9573f32019-06-20 10:30:17 +0100197 else
198 {
199 return default_qinfo;
200 }
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100201 case ActivationLayerInfo::ActivationFunction::LOGISTIC:
giuros01c9573f32019-06-20 10:30:17 +0100202 if(dt == DataType::QSYMM16)
203 {
204 return QuantizationInfo(1.f / qsymm16_max, 0);
205 }
206 else if(dt == DataType::QASYMM8)
207 {
208 return QuantizationInfo(1.f / qasymm8_max, 0);
209 }
Michalis Spyrou8d4d1b82019-11-28 11:31:23 +0000210 else if(dt == DataType::QASYMM8_SIGNED)
211 {
212 return QuantizationInfo(1.f / (2.f * qasymm8_signed_max), -int(qasymm8_signed_max));
213 }
giuros01c9573f32019-06-20 10:30:17 +0100214 else
215 {
216 return default_qinfo;
217 }
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100218 default:
219 return default_qinfo;
220 }
221 }
222
223protected:
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100224 TensorType _target{};
225 SimpleTensor<T> _reference{};
Georgios Pinitas4b3fba12019-06-04 17:31:46 +0100226 bool _in_place{};
227 QuantizationInfo _input_quantization_info{};
228 QuantizationInfo _output_quantization_info{};
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100229 DataType _data_type{};
230 ActivationLayerInfo::ActivationFunction _function{};
231};
232
233template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000234class ActivationValidationFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100235{
236public:
237 template <typename...>
238 void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type)
239 {
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +0100240 ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, QuantizationInfo());
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000241 }
242};
243
244template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
245class ActivationValidationQuantizedFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
246{
247public:
248 template <typename...>
249 void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
250 {
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +0100251 ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, quantization_info);
Michel Iwaniec66cc12f2017-12-07 17:26:40 +0000252 }
253};
254
Moritz Pflanzer572ade72017-07-21 17:36:33 +0100255} // namespace validation
256} // namespace test
257} // namespace arm_compute
258#endif /* ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE */