/*
 * Copyright (c) 2017-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE
#define ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE

#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/core/Utils.h"
#include "tests/AssetsLibrary.h"
#include "tests/Globals.h"
#include "tests/IAccessor.h"
#include "tests/RawTensor.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Fixture.h"
#include "tests/validation/Helpers.h"
#include "tests/validation/reference/ActivationLayer.h"
#include "tests/validation/reference/FullyConnectedLayer.h"
#include "tests/validation/reference/Utils.h"

#include <random>

namespace arm_compute
{
namespace test
{
namespace validation
{
template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedLayerValidationGenericFixture : public framework::Fixture
{
public:
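    // Quantized asymmetric data types accumulate into 32-bit integers, so the
    // bias reference tensor must be S32; every other data type keeps the bias in T.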
    using TDecay = typename std::decay<T>::type;
    using TBias  = typename std::conditional < (std::is_same<TDecay, uint8_t>::value || std::is_same<TDecay, int8_t>::value), int32_t, T >::type;

public:
    template <typename...>
    void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights,
               DataType data_type, QuantizationInfo quantization_info, ActivationLayerInfo activation_info, bool mixed_layout = false)
    {
        ARM_COMPUTE_UNUSED(weights_shape);
        ARM_COMPUTE_UNUSED(bias_shape);

        _mixed_layout      = mixed_layout;
        _data_type         = data_type;
        _bias_data_type    = is_data_type_quantized_asymmetric(data_type) ? DataType::S32 : data_type;
        _quantization_info = quantization_info;
        _activation_info   = activation_info;

        _target    = compute_target(input_shape, weights_shape, bias_shape, output_shape, transpose_weights, reshape_weights);
        _reference = compute_reference(input_shape, weights_shape, bias_shape, output_shape);
    }

protected:
    void mix_layout(FunctionType &layer, TensorType &src, TensorType &dst)
    {
        const DataLayout data_layout = src.info()->data_layout();
        // Test multi data-layout graph cases: flip the data layout after configure
        src.info()->set_data_layout(data_layout == DataLayout::NCHW ? DataLayout::NHWC : DataLayout::NCHW);
        dst.info()->set_data_layout(data_layout == DataLayout::NCHW ? DataLayout::NHWC : DataLayout::NCHW);

        // Compute the fully connected function
        layer.run();

        // Restore the original data layout so the test suite can validate the values
        src.info()->set_data_layout(data_layout);
        dst.info()->set_data_layout(data_layout);
    }

    template <typename U>
    void fill(U &&tensor, int i)
    {
        if(_data_type == DataType::QASYMM8)
        {
            std::uniform_int_distribution<uint8_t> distribution(0, 30);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::QASYMM8_SIGNED)
        {
            std::uniform_int_distribution<int8_t> distribution(-15, 15);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::S32)
        {
            std::uniform_int_distribution<int32_t> distribution(-50, 50);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::F16)
        {
            arm_compute::utils::uniform_real_distribution_16bit<half> distribution(-1.0f, 1.0f);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::F32)
        {
            std::uniform_real_distribution<float> distribution(-1.0f, 1.0f);
            library->fill(tensor, distribution, i);
        }
        else
        {
            library->fill_tensor_uniform(tensor, i);
        }
    }
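
    // Note: the integer passed to fill() offsets the library's RNG seed, so each
    // tensor (src = 0, weights = 1, bias = 2) receives distinct, reproducible data.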

    TensorType compute_target(const TensorShape &input_shape, const TensorShape &weights_shape, const TensorShape &bias_shape, const TensorShape &output_shape, bool transpose_weights,
                              bool reshape_weights)
    {
        TensorShape reshaped_weights_shape(weights_shape);

        // Test actions depending on the target settings
        //
        //            | reshape   | !reshape
        // -----------+-----------+---------------------------
        //  transpose |           | ***
        // -----------+-----------+---------------------------
        // !transpose | transpose | transpose
        //            |           |
        //
        // ***: That combination is invalid, but we can ignore the transpose flag and handle all !reshape cases the same way.
        if(!reshape_weights || !transpose_weights)
        {
            const size_t shape_x = reshaped_weights_shape.x();
            reshaped_weights_shape.set(0, reshaped_weights_shape.y());
            reshaped_weights_shape.set(1, shape_x);
        }
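        // For example, an 8x4 weights request with reshape_weights == false becomes
        // a 4x8 target tensor here; the matching transposed fill happens below.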

        // Create tensors
        TensorType src     = create_tensor<TensorType>(input_shape, _data_type, 1, _quantization_info);
        TensorType weights = create_tensor<TensorType>(reshaped_weights_shape, _data_type, 1, _quantization_info);
        TensorType bias    = create_tensor<TensorType>(bias_shape, _bias_data_type, 1, _quantization_info);
        TensorType dst     = create_tensor<TensorType>(output_shape, _data_type, 1, _quantization_info);

        // Create Fully Connected layer info
        FullyConnectedLayerInfo fc_info;
        fc_info.transpose_weights    = transpose_weights;
        fc_info.are_weights_reshaped = !reshape_weights;
        fc_info.activation_info      = _activation_info;
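        // are_weights_reshaped describes the state the weights are already in, so it
        // is the negation of the fixture's reshape_weights request.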

        // Create and configure function.
        FunctionType fc;
        fc.configure(&src, &weights, &bias, &dst, fc_info);

        ARM_COMPUTE_ASSERT(src.info()->is_resizable());
        ARM_COMPUTE_ASSERT(weights.info()->is_resizable());
        ARM_COMPUTE_ASSERT(bias.info()->is_resizable());
        ARM_COMPUTE_ASSERT(dst.info()->is_resizable());

        add_padding_x({ &src, &weights, &bias, &dst });

        // Allocate tensors
        src.allocator()->allocate();
        weights.allocator()->allocate();
        bias.allocator()->allocate();
        dst.allocator()->allocate();

        ARM_COMPUTE_ASSERT(!src.info()->is_resizable());
        ARM_COMPUTE_ASSERT(!weights.info()->is_resizable());
        ARM_COMPUTE_ASSERT(!bias.info()->is_resizable());
        ARM_COMPUTE_ASSERT(!dst.info()->is_resizable());

        // Fill tensors
        fill(AccessorType(src), 0);
        fill(AccessorType(bias), 2);

        if(!reshape_weights || !transpose_weights)
        {
            TensorShape tmp_shape(weights_shape);
            RawTensor   tmp(tmp_shape, _data_type, 1);

            // Fill with original shape
            fill(tmp, 1);

            // Transpose elementwise
            tmp = transpose(tmp);

            AccessorType weights_accessor(weights);

            for(int i = 0; i < tmp.num_elements(); ++i)
            {
                Coordinates coord = index2coord(tmp.shape(), i);
                std::copy_n(static_cast<const RawTensor::value_type *>(tmp(coord)),
                            tmp.element_size(),
                            static_cast<RawTensor::value_type *>(weights_accessor(coord)));
            }
        }
        else
        {
            fill(AccessorType(weights), 1);
        }

        if(_mixed_layout)
        {
            mix_layout(fc, src, dst);
        }
        else
        {
            // Compute the fully connected function
            fc.run();
        }

        return dst;
    }
222
Michalis Spyrou6bff1952019-10-02 17:22:11 +0100223 SimpleTensor<T> compute_reference(const TensorShape &input_shape, const TensorShape &weights_shape, const TensorShape &bias_shape, const TensorShape &output_shape)
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100224 {
225 // Create reference
Vidhya Sudhan Loganathan014333d2018-07-02 09:13:49 +0100226 SimpleTensor<T> src{ input_shape, _data_type, 1, _quantization_info };
227 SimpleTensor<T> weights{ weights_shape, _data_type, 1, _quantization_info };
228 SimpleTensor<TBias> bias{ bias_shape, _bias_data_type, 1, _quantization_info };
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100229
230 // Fill reference
231 fill(src, 0);
232 fill(weights, 1);
233 fill(bias, 2);
234
Giorgio Arena63e0beb2021-09-24 14:04:27 +0100235 return reference::activation_layer(reference::fully_connected_layer<T>(src, weights, bias, output_shape, _quantization_info), _activation_info, _quantization_info);
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100236 }
237
Giorgio Arena1856ff72020-02-07 13:46:45 +0000238 TensorType _target{};
239 SimpleTensor<T> _reference{};
240 DataType _data_type{};
241 DataType _bias_data_type{};
Giorgio Arena63825e82021-03-25 14:54:50 +0000242 bool _mixed_layout{ false };
Giorgio Arena1856ff72020-02-07 13:46:45 +0000243 QuantizationInfo _quantization_info{};
244 ActivationLayerInfo _activation_info{};
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100245};
246
Manuel Bottinica62c6f2021-03-23 11:50:34 +0000247template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool mixed_layout = false>
Giorgio Arenaa855af12018-07-16 17:20:38 +0100248class FullyConnectedLayerValidationFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100249{
250public:
251 template <typename...>
Giorgio Arena1856ff72020-02-07 13:46:45 +0000252 void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
253 ActivationLayerInfo activation_info)
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100254 {
Giorgio Arenaa855af12018-07-16 17:20:38 +0100255 FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(input_shape, weights_shape, bias_shape, output_shape, transpose_weights,
256 reshape_weights, data_type,
Manuel Bottinica62c6f2021-03-23 11:50:34 +0000257 QuantizationInfo(), activation_info, mixed_layout);
Georgios Pinitas45bcc3a2017-11-29 11:06:49 +0000258 }
259};
260
Manuel Bottinica62c6f2021-03-23 11:50:34 +0000261template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool mixed_layout = false>
Giorgio Arenaa855af12018-07-16 17:20:38 +0100262class FullyConnectedLayerValidationQuantizedFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
Georgios Pinitas45bcc3a2017-11-29 11:06:49 +0000263{
264public:
265 template <typename...>
266 void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
Giorgio Arena1856ff72020-02-07 13:46:45 +0000267 QuantizationInfo quantization_info, ActivationLayerInfo activation_info)
Georgios Pinitas45bcc3a2017-11-29 11:06:49 +0000268 {
Giorgio Arenaa855af12018-07-16 17:20:38 +0100269 FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>::setup(input_shape, weights_shape, bias_shape, output_shape, transpose_weights,
270 reshape_weights, data_type,
Manuel Bottinica62c6f2021-03-23 11:50:34 +0000271 quantization_info, activation_info, mixed_layout);
Moritz Pflanzer69d33412017-08-09 11:45:15 +0100272 }
273};
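
// A backend validation suite would typically alias one of the fixtures above and
// register it with the test framework. A minimal sketch follows; the backend types
// (Tensor, Accessor, NEFullyConnectedLayer), the dataset and the tolerance are
// illustrative assumptions, not part of this header:
//
//   template <typename T>
//   using NEFullyConnectedLayerFixture = FullyConnectedLayerValidationFixture<Tensor, Accessor, NEFullyConnectedLayer, T>;
//
//   FIXTURE_DATA_TEST_CASE(RunSmall, NEFullyConnectedLayerFixture<float>, framework::DatasetMode::PRECOMMIT,
//                          combine(datasets::SmallFullyConnectedLayerDataset(), /* data type, activation, ... */))
//   {
//       validate(Accessor(_target), _reference, RelativeTolerance<float>(0.01f));
//   }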

template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedWithDynamicTensorsFixture : public framework::Fixture
{
private:
    template <typename U>
    void fill(U &&tensor, int i)
    {
        if(_data_type == DataType::F16)
        {
            arm_compute::utils::uniform_real_distribution_16bit<half> distribution(-1.0f, 1.0f);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::F32)
        {
            std::uniform_real_distribution<float> distribution(-1.0f, 1.0f);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::QASYMM8)
        {
            std::uniform_int_distribution<uint8_t> distribution(0, 30);
            library->fill(tensor, distribution, i);
        }
        else if(_data_type == DataType::S32)
        {
            std::uniform_int_distribution<int32_t> distribution(-50, 50);
            library->fill(tensor, distribution, i);
        }
        else
        {
            library->fill_tensor_uniform(tensor, i);
        }
    }

    void fill_transposed_weights(TensorType &weights, TensorShape weights_shape, int seed)
    {
        RawTensor tmp(weights_shape, _data_type, 1);

        // Fill with original shape
        fill(tmp, seed);

        // Transpose elementwise
        tmp = transpose(tmp);

        AccessorType weights_accessor(weights);

        for(int i = 0; i < tmp.num_elements(); ++i)
        {
            Coordinates coord = index2coord(tmp.shape(), i);
            std::copy_n(static_cast<const RawTensor::value_type *>(tmp(coord)),
                        tmp.element_size(),
                        static_cast<RawTensor::value_type *>(weights_accessor(coord)));
        }
    }

    void validate_with_tolerance(TensorType &target, SimpleTensor<T> &ref)
    {
        if(_data_type == DataType::F32)
        {
            constexpr RelativeTolerance<float> rel_tolerance_f32(0.05f);
            constexpr AbsoluteTolerance<float> abs_tolerance_f32(0.0001f);
            validate(AccessorType(target), ref, rel_tolerance_f32, 0, abs_tolerance_f32);
        }
        else if(_data_type == DataType::QASYMM8)
        {
            constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(1);
            validate(AccessorType(target), ref, tolerance_qasymm8);
        }
        else
        {
            validate(AccessorType(target), ref);
        }
    }

public:
    using TDecay = typename std::decay<T>::type;
    using TBias  = typename std::conditional < (std::is_same<TDecay, uint8_t>::value || std::is_same<TDecay, int8_t>::value), int32_t, T >::type;

    template <typename...>
    void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
               DataType data_type, ActivationLayerInfo activation_info, bool constant_weights, bool constant_bias)
    {
        _data_type = data_type;

        const bool is_quantized = is_data_type_quantized(data_type);

        const DataType bias_data_type = (is_quantized) ? DataType::S32 : data_type;

        const QuantizationInfo src_qinfo     = is_quantized ? QuantizationInfo(0.1f, 10) : QuantizationInfo();
        const QuantizationInfo weights_qinfo = is_quantized ? QuantizationInfo(0.3f, 20) : QuantizationInfo();
        const QuantizationInfo dst_qinfo     = is_quantized ? QuantizationInfo(0.2f, 5) : QuantizationInfo();

        // Setup tensor meta-data
        const TensorInfo src_info(src_shape, 1, data_type, src_qinfo);
        _src.allocator()->init(src_info);

        TensorInfo wei_info(weights_shape, 1, data_type, weights_qinfo);
        if(!constant_weights)
        {
            const TensorShape tr_weights_shape{ weights_shape[1], weights_shape[0] };
            wei_info.set_tensor_shape(tr_weights_shape);
        }
        wei_info.set_are_values_constant(constant_weights);
        _weights.allocator()->init(wei_info);

        TensorInfo bias_info(bias_shape, 1, bias_data_type);
        bias_info.set_are_values_constant(constant_bias);
        _bias.allocator()->init(bias_info);

        const TensorInfo dst_info(dst_shape, 1, data_type, dst_qinfo);
        _dst.allocator()->init(dst_info);

        // Configure the FC layer; non-constant weights are supplied pre-transposed on every run
        FullyConnectedLayerInfo fc_info;
        fc_info.activation_info = activation_info;
        if(!constant_weights)
        {
            fc_info.are_weights_reshaped = true;
            fc_info.transpose_weights    = false;
        }
        FunctionType fc;
        fc.configure(&_src, &_weights, &_bias, &_dst, fc_info);

        // Allocate all the tensors
        _src.allocator()->allocate();
        _weights.allocator()->allocate();
        _bias.allocator()->allocate();
        _dst.allocator()->allocate();

        // Run multiple iterations with different inputs
        constexpr int num_iterations    = 5;
        int           randomizer_offset = 0;
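        // Each iteration below bumps randomizer_offset (by 100) so the target and the
        // reference are refilled with identical, fresh data every pass.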

        // Create reference tensors
        SimpleTensor<T>     src{ src_shape, data_type, 1, src_qinfo };
        SimpleTensor<T>     weights{ weights_shape, data_type, 1, weights_qinfo };
        SimpleTensor<TBias> bias{ bias_shape, bias_data_type };

        // Fill weights and/or bias if they remain constant
        if(constant_weights)
        {
            fill(AccessorType(_weights), 1);
            fill(weights, 1);
        }
        if(constant_bias)
        {
            fill(AccessorType(_bias), 2);
            fill(bias, 2);
        }

        for(int i = 0; i < num_iterations; ++i)
        {
            // Run target
            {
                fill(AccessorType(_src), randomizer_offset);
                if(!constant_weights)
                {
                    fill_transposed_weights(_weights, weights_shape, randomizer_offset + 1);
                }
                if(!constant_bias)
                {
                    fill(AccessorType(_bias), randomizer_offset + 2);
                }

                fc.run();
            }

            // Run reference and compare
            {
                // Fill reference
                fill(src, randomizer_offset);
                if(!constant_weights)
                {
                    fill(weights, randomizer_offset + 1);
                }
                if(!constant_bias)
                {
                    fill(bias, randomizer_offset + 2);
                }

                auto dst = reference::activation_layer(reference::fully_connected_layer<T>(src, weights, bias, dst_shape, dst_qinfo), activation_info, dst_qinfo);

                // Validate
                validate_with_tolerance(_dst, dst);
            }

            randomizer_offset += 100;
        }
    }

private:
    TensorType _src{}, _weights{}, _bias{}, _dst{};
    DataType   _data_type{ DataType::UNKNOWN };
};

template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedWithDynamicWeightsFixture : public FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
               DataType data_type, ActivationLayerInfo activation_info)
    {
        FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>::setup(src_shape, weights_shape, bias_shape,
                                                                                                   dst_shape, data_type, activation_info, false, true);
    }
};

template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
class FullyConnectedWithDynamicBiasFixture : public FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>
{
public:
    template <typename...>
    void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
               DataType data_type, ActivationLayerInfo activation_info)
    {
        FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>::setup(src_shape, weights_shape, bias_shape,
                                                                                                   dst_shape, data_type, activation_info, true, false);
    }
};
} // namespace validation
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_FULLY_CONNECTED_LAYER_FIXTURE */