/*
 * Copyright (c) 2017-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE
#define ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE

#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"
#include "tests/AssetsLibrary.h"
#include "tests/Globals.h"
#include "tests/IAccessor.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Fixture.h"
#include "tests/framework/ParametersLibrary.h"
#include "tests/validation/Helpers.h"
#include "tests/validation/reference/ActivationLayer.h"

#include <random>

namespace arm_compute
{
namespace test
{
namespace validation
{
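/** Generic fixture that validates an activation layer function against the
 * reference implementation in tests/validation/reference/ActivationLayer.h.
 *
 * The fixture runs the function under test via compute_target() and the
 * reference via compute_reference(), storing the results in _target and
 * _reference for the test case to compare.
 *
 * Minimal usage sketch. The alias, dataset and validation call below are
 * illustrative only; they follow the pattern used by the backend-specific
 * test suites (e.g. NEON/CL) and are not defined in this header:
 *
 *   template <typename T>
 *   using NEActivationLayerFixture = ActivationValidationFixture<Tensor, Accessor, NEActivationLayer, T>;
 *
 *   FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<float>, framework::DatasetMode::PRECOMMIT, dataset)
 *   {
 *       // Compare the computed output against the reference
 *       validate(Accessor(_target), _reference);
 *   }
 */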
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class ActivationValidationGenericFixture : public framework::Fixture
{
public:
    ActivationValidationGenericFixture()
        : _target(parameters->get_ctx<TensorType>())
    {
    }

    template <typename...>
    void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
    {
        ActivationLayerInfo info(function, alpha_beta, alpha_beta);

        _in_place                 = in_place;
        _data_type                = data_type;
        _output_quantization_info = calculate_output_quantization_info(_data_type, info, quantization_info);
        _input_quantization_info  = in_place ? _output_quantization_info : quantization_info;

        _function  = function;
        _target    = compute_target(shape, info);
        _reference = compute_reference(shape, info);
    }

protected:
    template <typename U>
    void fill(U &&tensor)
    {
        if(is_data_type_float(_data_type))
        {
            float min_bound = 0;
            float max_bound = 0;
            std::tie(min_bound, max_bound) = get_activation_layer_test_bounds<T>(_function, _data_type);
            std::uniform_real_distribution<> distribution(min_bound, max_bound);
            library->fill(tensor, distribution, 0);
        }
        else if(is_data_type_quantized(tensor.data_type()))
        {
            library->fill_tensor_uniform(tensor, 0);
        }
        else
        {
            int min_bound = 0;
            int max_bound = 0;
            std::tie(min_bound, max_bound) = get_activation_layer_test_bounds<T>(_function, _data_type);
            std::uniform_int_distribution<> distribution(min_bound, max_bound);
            library->fill(tensor, distribution, 0);
        }
    }

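    /** Run the function under test.
     *
     * Creates the input/output tensors, configures and runs FunctionType, and
     * returns the computed output (the input tensor itself when running in place).
     */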
    TensorType compute_target(const TensorShape &shape, ActivationLayerInfo info)
    {
        auto ctx = parameters->get_ctx<TensorType>();
        // Create tensors
        TensorType src = create_tensor<TensorType>(shape, _data_type, 1, _input_quantization_info, DataLayout::NCHW, ctx);
        TensorType dst = create_tensor<TensorType>(shape, _data_type, 1, _output_quantization_info, DataLayout::NCHW, ctx);

        // Create and configure function
        FunctionType act_layer(ctx);

        TensorType *dst_ptr = _in_place ? nullptr : &dst;

        act_layer.configure(&src, dst_ptr, info);

        ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);

        // Allocate tensors
        src.allocator()->allocate();
        ARM_COMPUTE_EXPECT(!src.info()->is_resizable(), framework::LogLevel::ERRORS);

        if(!_in_place)
        {
            dst.allocator()->allocate();
            ARM_COMPUTE_EXPECT(!dst.info()->is_resizable(), framework::LogLevel::ERRORS);
        }

        // Fill tensors
        fill(AccessorType(src));

        // Compute function
        act_layer.run();

        if(_in_place)
        {
            return src;
        }
        else
        {
            return dst;
        }
    }

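    /** Compute the expected output on a SimpleTensor using the reference activation layer implementation. */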
    SimpleTensor<T> compute_reference(const TensorShape &shape, ActivationLayerInfo info)
    {
        // Create reference
        SimpleTensor<T> src{ shape, _data_type, 1, _input_quantization_info };

        // Fill reference
        fill(src);

        return reference::activation_layer<T>(src, info, _output_quantization_info);
    }

private:
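    /** Choose the output quantization info used for the target and reference runs.
     *
     * TANH and LOGISTIC have bounded outputs ([-1, 1] and [0, 1] respectively), so
     * for QASYMM8/QSYMM16 the scale and offset are picked to map that range onto
     * the full quantized value range; in every other case the caller's
     * quantization info is returned unchanged.
     */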
    QuantizationInfo calculate_output_quantization_info(DataType dt, const ActivationLayerInfo &act_info, const QuantizationInfo &default_qinfo)
    {
        auto qasymm8_max = float(std::numeric_limits<uint8_t>::max()) + 1.f;
        auto qsymm16_max = float(std::numeric_limits<int16_t>::max()) + 1.f;

        switch(act_info.activation())
        {
            case ActivationLayerInfo::ActivationFunction::TANH:
                if(dt == DataType::QSYMM16)
                {
                    return QuantizationInfo(1.f / qsymm16_max, 0);
                }
                else if(dt == DataType::QASYMM8)
                {
                    return QuantizationInfo(1.f / (0.5 * qasymm8_max), int(0.5 * qasymm8_max));
                }
                else
                {
                    return default_qinfo;
                }
            case ActivationLayerInfo::ActivationFunction::LOGISTIC:
                if(dt == DataType::QSYMM16)
                {
                    return QuantizationInfo(1.f / qsymm16_max, 0);
                }
                else if(dt == DataType::QASYMM8)
                {
                    return QuantizationInfo(1.f / qasymm8_max, 0);
                }
                else
                {
                    return default_qinfo;
                }
            default:
                return default_qinfo;
        }
    }

protected:
    TensorType                              _target{};
    SimpleTensor<T>                         _reference{};
    bool                                    _in_place{};
    QuantizationInfo                        _input_quantization_info{};
    QuantizationInfo                        _output_quantization_info{};
    DataType                                _data_type{};
    ActivationLayerInfo::ActivationFunction _function{};
};

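/** Fixture for non-quantized data types; forwards a default (empty) QuantizationInfo to the generic fixture. */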
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class ActivationValidationFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type)
    {
        ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, QuantizationInfo());
    }
};

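/** Fixture for quantized data types; forwards the dataset's QuantizationInfo to the generic fixture. */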
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class ActivationValidationQuantizedFixture : public ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape shape, bool in_place, ActivationLayerInfo::ActivationFunction function, float alpha_beta, DataType data_type, QuantizationInfo quantization_info)
    {
        ActivationValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(shape, in_place, function, alpha_beta, data_type, quantization_info);
    }
};

} // namespace validation
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_ACTIVATION_LAYER_FIXTURE */