/*
 * Copyright (c) 2017-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE
#define ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE

#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/core/Utils.h"
#include "tests/AssetsLibrary.h"
#include "tests/Globals.h"
#include "tests/IAccessor.h"
#include "tests/RawTensor.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Fixture.h"
#include "tests/validation/Helpers.h"
#include "tests/validation/reference/ActivationLayer.h"
#include "tests/validation/reference/FullyConnectedLayer.h"
#include "tests/validation/reference/Utils.h"

#include <random>

namespace arm_compute
{
namespace test
{
namespace validation
{
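/** Generic validation fixture for fully connected layers.
 *
 * Runs the target function (FunctionType) on tensors created with the requested
 * data type, quantization and activation, computes the expected result with the
 * reference implementation, and stores both in _target and _reference so that
 * test cases can compare them against a tolerance.
 */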
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedLayerValidationGenericFixture : public framework::Fixture
{
public:
    using TDecay = typename std::decay<T>::type;
    using TBias  = typename std::conditional < (std::is_same<TDecay, uint8_t>::value || std::is_same<TDecay, int8_t>::value), int32_t, T >::type;

public:
    template <typename...>
    void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights,
               DataType data_type, QuantizationInfo quantization_info, ActivationLayerInfo activation_info)
    {
        ARM_COMPUTE_UNUSED(weights_shape);
        ARM_COMPUTE_UNUSED(bias_shape);

        _data_type         = data_type;
        _bias_data_type    = is_data_type_quantized_asymmetric(data_type) ? DataType::S32 : data_type;
        _quantization_info = quantization_info;
        _activation_info   = activation_info;

        _target    = compute_target(input_shape, weights_shape, bias_shape, output_shape, transpose_weights, reshape_weights);
        _reference = compute_reference(input_shape, weights_shape, bias_shape, output_shape);
    }

protected:
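    /** Fill a tensor with reproducible pseudo-random values.
     *
     * The index @p i acts as a seed offset so that src, weights and bias receive
     * different but repeatable data. Each data type uses its own narrow value
     * range (see the distributions below).
     */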
    template <typename U>
    void fill(U &&tensor, int i)
    {
        if(_data_type == DataType::QASYMM8)
        {
            std::uniform_int_distribution<uint8_t> distribution(0, 30);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::QASYMM8_SIGNED)
        {
            std::uniform_int_distribution<int8_t> distribution(-15, 15);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::S32)
        {
            std::uniform_int_distribution<int32_t> distribution(-50, 50);
            library->fill(tensor, distribution, i);
        }
        else if(is_data_type_float(_data_type))
        {
            std::uniform_real_distribution<> distribution(-1.0f, 1.0f);
            library->fill(tensor, distribution, i);
        }
        else
        {
            library->fill_tensor_uniform(tensor, i);
        }
    }

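    /** Create, configure and run the function under test.
     *
     * Checks that tensor info is resizable before allocation and fixed afterwards,
     * fills the inputs and returns the computed output tensor. Depending on the
     * transpose/reshape flags, the weights may be transposed manually before being
     * passed to the function (see the table below).
     */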
    TensorType compute_target(const TensorShape &input_shape, const TensorShape &weights_shape, const TensorShape &bias_shape, const TensorShape &output_shape, bool transpose_weights,
                              bool reshape_weights)
    {
        TensorShape reshaped_weights_shape(weights_shape);

        // Test actions depending on the target settings
        //
        //            | reshape   | !reshape
        // -----------+-----------+---------------------------
        //  transpose |           | ***
        // -----------+-----------+---------------------------
        // !transpose | transpose | transpose
        //            |           |
        //
        // ***: That combination is invalid. But we can ignore the transpose flag and handle all !reshape the same
        if(!reshape_weights || !transpose_weights)
        {
            const size_t shape_x = reshaped_weights_shape.x();
            reshaped_weights_shape.set(0, reshaped_weights_shape.y());
            reshaped_weights_shape.set(1, shape_x);
        }

        // Create tensors
        TensorType src     = create_tensor<TensorType>(input_shape, _data_type, 1, _quantization_info);
        TensorType weights = create_tensor<TensorType>(reshaped_weights_shape, _data_type, 1, _quantization_info);
        TensorType bias    = create_tensor<TensorType>(bias_shape, _bias_data_type, 1, _quantization_info);
        TensorType dst     = create_tensor<TensorType>(output_shape, _data_type, 1, _quantization_info);

        // Create Fully Connected layer info
        FullyConnectedLayerInfo fc_info;
        fc_info.transpose_weights    = transpose_weights;
        fc_info.are_weights_reshaped = !reshape_weights;
        fc_info.activation_info      = _activation_info;

        // Create and configure function.
        FunctionType fc;
        fc.configure(&src, &weights, &bias, &dst, fc_info);

        ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(weights.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);

        // Allocate tensors
        src.allocator()->allocate();
        weights.allocator()->allocate();
        bias.allocator()->allocate();
        dst.allocator()->allocate();

        ARM_COMPUTE_EXPECT(!src.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(!weights.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(!bias.info()->is_resizable(), framework::LogLevel::ERRORS);
        ARM_COMPUTE_EXPECT(!dst.info()->is_resizable(), framework::LogLevel::ERRORS);

        // Fill tensors
        fill(AccessorType(src), 0);
        fill(AccessorType(bias), 2);

        if(!reshape_weights || !transpose_weights)
        {
            TensorShape tmp_shape(weights_shape);
            RawTensor   tmp(tmp_shape, _data_type, 1);

            // Fill with original shape
            fill(tmp, 1);

            // Transpose elementwise
            tmp = transpose(tmp);

            AccessorType weights_accessor(weights);

            for(int i = 0; i < tmp.num_elements(); ++i)
            {
                Coordinates coord = index2coord(tmp.shape(), i);
                std::copy_n(static_cast<const RawTensor::value_type *>(tmp(coord)),
                            tmp.element_size(),
                            static_cast<RawTensor::value_type *>(weights_accessor(coord)));
            }
        }
        else
        {
            fill(AccessorType(weights), 1);
        }

        // Compute the fully connected function under test
        fc.run();

        return dst;
    }

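    /** Compute the expected output with the reference implementation.
     *
     * Uses the same fill() seed offsets as compute_target() (0 for src, 1 for
     * weights, 2 for bias) so both paths see identical data, then applies the
     * activation on top of the reference fully connected result.
     */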
    SimpleTensor<T> compute_reference(const TensorShape &input_shape, const TensorShape &weights_shape, const TensorShape &bias_shape, const TensorShape &output_shape)
    {
        // Create reference
        SimpleTensor<T>     src{ input_shape, _data_type, 1, _quantization_info };
        SimpleTensor<T>     weights{ weights_shape, _data_type, 1, _quantization_info };
        SimpleTensor<TBias> bias{ bias_shape, _bias_data_type, 1, _quantization_info };

        // Fill reference
        fill(src, 0);
        fill(weights, 1);
        fill(bias, 2);

        return reference::activation_layer(reference::fully_connected_layer<T>(src, weights, bias, output_shape), _activation_info, _quantization_info);
    }

    TensorType          _target{};
    SimpleTensor<T>     _reference{};
    DataType            _data_type{};
    DataType            _bias_data_type{};
    QuantizationInfo    _quantization_info{};
    ActivationLayerInfo _activation_info{};
};

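/** Validation fixture for non-quantized data types: forwards an empty QuantizationInfo to the generic fixture. */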
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedLayerValidationFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
               ActivationLayerInfo activation_info)
    {
        FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(input_shape, weights_shape, bias_shape, output_shape, transpose_weights,
                                                                                                       reshape_weights, data_type,
                                                                                                       QuantizationInfo(), activation_info);
    }
};

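/** Validation fixture for quantized data types: forwards the caller-supplied QuantizationInfo to the generic fixture. */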
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedLayerValidationQuantizedFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
               QuantizationInfo quantization_info, ActivationLayerInfo activation_info)
    {
        FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(input_shape, weights_shape, bias_shape, output_shape, transpose_weights,
                                                                                                       reshape_weights, data_type,
                                                                                                       quantization_info, activation_info);
    }
};
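
// Illustrative usage sketch: a backend test suite would typically alias one of
// these fixtures for a concrete function/tensor/accessor combination and then
// register data-driven test cases with it. The names below (Tensor, Accessor,
// NEFullyConnectedLayer, tolerance_f32 and the dataset placeholder) are
// assumptions for illustration, not definitions provided by this header.
//
//   template <typename T>
//   using NEFullyConnectedLayerFixture =
//       FullyConnectedLayerValidationFixture<Tensor, Accessor, NEFullyConnectedLayer, T>;
//
//   FIXTURE_DATA_TEST_CASE(RunSmall, NEFullyConnectedLayerFixture<float>,
//                          framework::DatasetMode::PRECOMMIT, /* dataset */)
//   {
//       // Compare the target output against the reference within a tolerance
//       validate(Accessor(_target), _reference, tolerance_f32);
//   }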
} // namespace validation
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE */