/*
 * Copyright (c) 2023 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
24#include "arm_compute/core/Types.h"
25#include "arm_compute/runtime/NEON/functions/NEMatMul.h"
26
27#include "tests/NEON/Accessor.h"
28#include "tests/framework/Asserts.h"
29#include "tests/framework/Macros.h"
30#include "tests/framework/datasets/Datasets.h"
31#include "tests/validation/Validation.h"
32
33#include "tests/datasets/LargeMatMulDataset.h"
34#include "tests/datasets/SmallMatMulDataset.h"
35#include "tests/validation/fixtures/MatMulFixture.h"
36
37namespace arm_compute
38{
39namespace test
40{
41namespace validation
42{
Viet-Hoa Doc85edf12023-09-01 16:48:17 +010043using framework::dataset::make;
44
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +000045TEST_SUITE(NEON)
46TEST_SUITE(MatMul)
47
Viet-Hoa Do9c7c2d22023-04-11 17:16:27 +010048constexpr AbsoluteTolerance<float> tolerance_fp32(0.001f); /**< Tolerance value for comparing reference's output against implementation's output for FP32 data types */
49const AbsoluteTolerance<half> tolerance_fp16(half(0.1f));
Ramy Elgammalaf150762023-04-25 17:19:27 +010050#ifdef __aarch64__
Viet-Hoa Do9c7c2d22023-04-11 17:16:27 +010051constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(0);
52constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8_signed(0);
Ramy Elgammalaf150762023-04-25 17:19:27 +010053#endif // __aarch64__
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +000054
55// clang-format off
56// *INDENT-OFF*
57// Validation Tests
Viet-Hoa Doc85edf12023-09-01 16:48:17 +010058DATA_TEST_CASE(Validate, framework::DatasetMode::ALL,
59 zip(
60 make("InputAInfo", {
61 TensorInfo(TensorShape(9U, 6U), 1, DataType::F32), // Mismatching datatype
62 TensorInfo(TensorShape(9U, 6U), 1, DataType::S32), // Unsupported datatypes
63 TensorInfo(TensorShape(9U, 6U, 2U), 1, DataType::F32), // Broadcasting in batch dimension not supported
64 TensorInfo(TensorShape(9U, 6U), 1, DataType::F32), // Invalid shape for multiplication
65 TensorInfo(TensorShape(9U, 6U), 1, DataType::F32),
66 TensorInfo(TensorShape(9U, 6U , 12U) , 1 , DataType::F32),
67 TensorInfo(TensorShape(9U, 6U , 12U) , 1 , DataType::F32), // Tensors are not dynamic
68 TensorInfo(TensorShape(9U, 6U), 1, DataType::QASYMM8),
69 TensorInfo(TensorShape(9U, 6U), 1, DataType::QASYMM8_SIGNED),
70 TensorInfo(TensorShape(9U, 6U), 1, DataType::QASYMM8_SIGNED), // Mismatching data type
71 }),
72 make("InputBInfo", {
73 TensorInfo(TensorShape(5U, 9U), 1, DataType::QASYMM8),
74 TensorInfo(TensorShape(5U, 9U), 1, DataType::S32),
75 TensorInfo(TensorShape(5U, 9U, 1U), 1, DataType::F32),
76 TensorInfo(TensorShape(5U, 12U), 1, DataType::F32),
77 TensorInfo(TensorShape(5U, 9U), 1, DataType::F32),
78 TensorInfo(TensorShape(5U, 9U, 12U), 1, DataType::F32),
79 TensorInfo(TensorShape(5U, 9U, 12U), 1, DataType::F32),
80 TensorInfo(TensorShape(5U, 9U), 1, DataType::QASYMM8),
81 TensorInfo(TensorShape(5U, 9U), 1, DataType::QASYMM8_SIGNED),
82 TensorInfo(TensorShape(5U, 9U), 1, DataType::QASYMM8_SIGNED),
83 }),
84 make("OutputInfo", {
85 TensorInfo(TensorShape(5U, 6U), 1, DataType::F32),
86 TensorInfo(TensorShape(5U, 6U), 1, DataType::S32),
87 TensorInfo(TensorShape(5U, 6U, 2U), 1, DataType::F32),
88 TensorInfo(TensorShape(5U, 6U), 1, DataType::F32),
89 TensorInfo(TensorShape(5U, 6U), 1, DataType::F32),
90 TensorInfo(TensorShape(5U, 6U, 12U) , 1, DataType::F32),
91 TensorInfo(TensorShape(5U, 6U, 12U) , 1, DataType::F32),
92 TensorInfo(TensorShape(5U, 6U), 1, DataType::QASYMM8),
93 TensorInfo(TensorShape(5U, 6U), 1, DataType::QASYMM8_SIGNED),
94 TensorInfo(TensorShape(5U, 6U), 1, DataType::QASYMM8),
95 }),
96 make("TensorIsConst", {false, false, false, false, false , false, true, false, false, false}),
97 make("Expected", { false, false, false, false, true, true, false, true, true, false })),
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +000098 a_info, b_info, output_info, are_tensors_const, expected)
99{
100 TensorInfo a{a_info};
101 TensorInfo b{b_info};
102 a.set_are_values_constant(are_tensors_const);
103 b.set_are_values_constant(are_tensors_const);
104 Status status = NEMatMul::validate(&a,
105 &b,
106 &output_info,
107 MatMulInfo(),
108 CpuMatMulSettings());
109 ARM_COMPUTE_EXPECT(bool(status) == expected, framework::LogLevel::ERRORS);
110}
111// *INDENT-ON*
112// clang-format on
113
114// Generic Template
115template <typename T>
116using NEMatMulFixture = MatMulValidationWithActivationFixture<Tensor, Accessor, NEMatMul, CpuMatMulSettings, T>;
117
118// Fast math Template
119template <typename T>
120using NEMatMulFastMathFixture = MatMulGenericValidationFixture<Tensor, Accessor, NEMatMul, CpuMatMulSettings, T>;
121
122template <typename T>
123using NEMatMulDynamicTensorsFixture = MatMulValidationWithDynamicTensorsFixture<Tensor, Accessor, NEMatMul, CpuMatMulSettings, T>;
124
Viet-Hoa Do9c7c2d22023-04-11 17:16:27 +0100125template <typename T>
126using NEQuantizedMatMulFixture = QuantizedMatMulValidationFixture<Tensor, Accessor, NEMatMul, CpuMatMulSettings, T>;
127
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000128TEST_SUITE(Float)
129TEST_SUITE(FP32)
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100130FIXTURE_DATA_TEST_CASE(RunSmall, NEMatMulFixture<float>, framework::DatasetMode::PRECOMMIT,
131 combine(
132 datasets::SmallMatMulDataset(),
133 make("TransposeA", { false, true }),
134 make("TransposeB", { false, true }),
135 make("DataType", DataType::F32),
136 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) })))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000137{
138 // Validate output
139 validate(Accessor(_target), _reference, tolerance_fp32);
140}
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100141FIXTURE_DATA_TEST_CASE(RunLarge, NEMatMulFixture<float>, framework::DatasetMode::NIGHTLY,
142 combine(
143 datasets::LargeMatMulDataset(),
144 make("TransposeA", { false, true }),
145 make("TransposeB", { false, true }),
146 make("DataType", DataType::F32),
147 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) })))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000148{
149 // Validate output
150 validate(Accessor(_target), _reference, tolerance_fp32);
151}
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100152FIXTURE_DATA_TEST_CASE(RunHighDimensions, NEMatMulFixture<float>, framework::DatasetMode::NIGHTLY,
153 combine(
154 datasets::HighDimensionalMatMulDataset(),
155 make("TransposeA", { false, true }),
156 make("TransposeB", { false, true }),
157 make("DataType", DataType::F32),
158 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) })))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000159{
160 // Validate output
161 validate(Accessor(_target), _reference, tolerance_fp32);
162}
163
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100164FIXTURE_DATA_TEST_CASE(RunStressDynamicTensors, NEMatMulDynamicTensorsFixture<float>, framework::DatasetMode::PRECOMMIT,
165 combine(
166 datasets::SmallMatMulDataset(),
167 make("TransposeA", { false, true }),
168 make("TransposeB", { false, true }),
169 make("DataType", DataType::F32),
170 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
171 make("NumberOfRuns", 5)))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000172{
173 // Validate output
174 validate(Accessor(_target), _reference, tolerance_fp32);
175}
176TEST_SUITE_END() // FP32
177
#ifdef ARM_COMPUTE_ENABLE_BF16
/* Note : MatMul BF16 is enabled by specifying FP32 datatype and enabling the fast math setting */
constexpr AbsoluteTolerance<float> tolerance_bf16(0.001f);
TEST_SUITE(BF16)
FIXTURE_DATA_TEST_CASE(RunSmall, NEMatMulFastMathFixture<float>, framework::DatasetMode::PRECOMMIT,
    combine(
        datasets::SmallMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::F32),
        make("ActivationInfo", { ActivationLayerInfo() }),
        make("RunTimes", { 0 }),
        make("Settings", { CpuMatMulSettings().fast_math(true) }),
        // Quantization info is unused on the float path; defaults keep the generic fixture happy.
        make("LhsQInfo", { QuantizationInfo() }),
        make("RhsQInfo", { QuantizationInfo() }),
        make("OutQInfo", { QuantizationInfo() }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_bf16);
}
TEST_SUITE_END() // BF16
#endif /* ARM_COMPUTE_ENABLE_BF16 */
201
202#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
203TEST_SUITE(FP16)
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100204FIXTURE_DATA_TEST_CASE(RunSmall, NEMatMulFixture<half>, framework::DatasetMode::PRECOMMIT,
205 combine(
206 datasets::SmallMatMulDataset(),
207 make("TransposeA", { false, true }),
208 make("TransposeB", { false, true }),
209 make("DataType", DataType::F16),
210 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) })))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000211{
212 // Validate output
213 validate(Accessor(_target), _reference, tolerance_fp16);
214}
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100215FIXTURE_DATA_TEST_CASE(RunLarge, NEMatMulFixture<half>, framework::DatasetMode::NIGHTLY,
216 combine(
217 datasets::LargeMatMulDataset(),
218 make("TransposeA", { false, true }),
219 make("TransposeB", { false, true }),
220 make("DataType", DataType::F16),
221 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) })))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000222{
223 // Validate output
224 validate(Accessor(_target), _reference, tolerance_fp16);
225}
Viet-Hoa Doc85edf12023-09-01 16:48:17 +0100226FIXTURE_DATA_TEST_CASE(RunStressDynamicTensors, NEMatMulDynamicTensorsFixture<half>, framework::DatasetMode::PRECOMMIT,
227 combine(
228 datasets::SmallMatMulDataset(),
229 make("TransposeA", { false, true }),
230 make("TransposeB", { false, true }),
231 make("DataType", DataType::F16),
232 make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
233 make("NumberOfRuns", 5)))
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000234{
235 // Validate output
236 validate(Accessor(_target), _reference, tolerance_fp16);
237}
238TEST_SUITE_END() // FP16
239#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
240
241TEST_SUITE_END() // Float
242
#ifdef __aarch64__ // All the GeMM CPU assembly kernels for integer datatypes require aarch64
TEST_SUITE(Quantized)

TEST_SUITE(QASYMM8)

FIXTURE_DATA_TEST_CASE(RunSmall, NEQuantizedMatMulFixture<uint8_t>, framework::DatasetMode::PRECOMMIT,
    combine(
        datasets::SmallMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8),
        make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 50, 1) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 30, -1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 2) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8);
}

// Covers the bounded activations not exercised by the precommit run.
FIXTURE_DATA_TEST_CASE(RunSmallExtraActivation, NEQuantizedMatMulFixture<uint8_t>, framework::DatasetMode::NIGHTLY,
    combine(
        datasets::SmallerMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8),
        make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 50, 1) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 30, -1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 2) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8);
}

FIXTURE_DATA_TEST_CASE(RunLarge, NEQuantizedMatMulFixture<uint8_t>, framework::DatasetMode::NIGHTLY,
    combine(
        datasets::LargeMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8),
        make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 100, 1) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 200, -1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 2) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8);
}

TEST_SUITE_END() // QASYMM8

TEST_SUITE(QASYMM8_SIGNED)

FIXTURE_DATA_TEST_CASE(RunSmall, NEQuantizedMatMulFixture<int8_t>, framework::DatasetMode::PRECOMMIT,
    combine(
        datasets::SmallMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8_SIGNED),
        make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 40, -2) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 50, 1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 1) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8_signed);
}

// Covers the bounded activations not exercised by the precommit run.
FIXTURE_DATA_TEST_CASE(RunSmallExtraActivation, NEQuantizedMatMulFixture<int8_t>, framework::DatasetMode::NIGHTLY,
    combine(
        datasets::SmallerMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8_SIGNED),
        make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 40, -2) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 50, 1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 1) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8_signed);
}

FIXTURE_DATA_TEST_CASE(RunLarge, NEQuantizedMatMulFixture<int8_t>, framework::DatasetMode::NIGHTLY,
    combine(
        datasets::LargeMatMulDataset(),
        make("TransposeA", { false, true }),
        make("TransposeB", { false, true }),
        make("DataType", DataType::QASYMM8_SIGNED),
        make("ActivationInfo", { ActivationLayerInfo(), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU) }),
        make("NumberOfExtraRuns", { 0, 1 }),
        make("LhsQInfo", { QuantizationInfo(1.f / 150, -2) }),
        make("RhsQInfo", { QuantizationInfo(1.f / 250, 1) }),
        make("OutQInfo", { QuantizationInfo(1.f, 1) }))
)
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance_qasymm8_signed);
}

TEST_SUITE_END() // QASYMM8_SIGNED

TEST_SUITE_END() // Quantized
#endif // __aarch64__
Viet-Hoa Do9c7c2d22023-04-11 17:16:27 +0100358
Mohammed Suhail Munshia1b1e412023-03-23 22:21:31 +0000359TEST_SUITE_END() // MatMul
360TEST_SUITE_END() // NEON
361} // namespace validation
362} // namespace test
363} // namespace arm_compute