blob: 7bf1f2633ef2f16a3c0a99afb4d5eabb38d5c201 [file] [log] [blame]
Sanghoon Lee96883782017-09-15 14:10:48 +01001/*
Giorgio Arena11674872018-02-07 15:38:12 +00002 * Copyright (c) 2017-2018 ARM Limited.
Sanghoon Lee96883782017-09-15 14:10:48 +01003 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
24#include "arm_compute/core/Types.h"
25#include "arm_compute/runtime/NEON/functions/NEBatchNormalizationLayer.h"
26#include "arm_compute/runtime/Tensor.h"
27#include "arm_compute/runtime/TensorAllocator.h"
28#include "tests/NEON/Accessor.h"
29#include "tests/PaddingCalculator.h"
30#include "tests/datasets/RandomBatchNormalizationLayerDataset.h"
31#include "tests/datasets/ShapeDatasets.h"
32#include "tests/framework/Asserts.h"
33#include "tests/framework/Macros.h"
34#include "tests/framework/datasets/Datasets.h"
35#include "tests/validation/Validation.h"
36#include "tests/validation/fixtures/BatchNormalizationLayerFixture.h"
37
38namespace arm_compute
39{
40namespace test
41{
42namespace validation
43{
namespace
{
// Absolute tolerances used when comparing the NEON implementation's output
// against the reference implementation for each data type.
constexpr AbsoluteTolerance<float> tolerance_f32(0.00001f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F32 */
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
constexpr AbsoluteTolerance<float> tolerance_f16(0.01f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::F16 */
#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
constexpr AbsoluteTolerance<float> tolerance_qs8(3.0f);  /**< Tolerance value for comparing reference's output against implementation's output for DataType::QS8 */
constexpr AbsoluteTolerance<float> tolerance_qs16(6.0f); /**< Tolerance value for comparing reference's output against implementation's output for DataType::QS16 */
// Activation functions fused with batch normalization in the float test cases below.
const auto act_infos = framework::dataset::make("ActivationInfo",
{
    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f),
    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 8.f, 2.f),
});
} // namespace
59
TEST_SUITE(NEON)
TEST_SUITE(BatchNormalizationLayer)

// Fixture that runs NEBatchNormalizationLayer on random inputs and compares
// the result against the reference implementation, for element type T.
template <typename T>
using NEBatchNormalizationLayerFixture = BatchNormalizationLayerValidationFixture<Tensor, Accessor, NEBatchNormalizationLayer, T>;
65
Michele Di Giorgio4d336302018-03-02 09:43:54 +000066DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(datasets::RandomBatchNormalizationLayerDataset(),
67 combine(framework::dataset::make("UseBeta", { false, true }), framework::dataset::make("UseGamma", { false, true }))),
68 framework::dataset::make("DataType", { DataType::QS8, DataType::QS16, DataType::F32 })),
69 shape0, shape1, epsilon, use_beta, use_gamma, dt)
Sanghoon Lee96883782017-09-15 14:10:48 +010070{
71 // Set fixed point position data type allowed
Sanghoon Leec1294fa2017-11-17 11:47:41 +000072 const int fixed_point_position = (arm_compute::is_data_type_fixed_point(dt)) ? 3 : 0;
Sanghoon Lee96883782017-09-15 14:10:48 +010073
74 // Create tensors
75 Tensor src = create_tensor<Tensor>(shape0, dt, 1, fixed_point_position);
76 Tensor dst = create_tensor<Tensor>(shape0, dt, 1, fixed_point_position);
77 Tensor mean = create_tensor<Tensor>(shape1, dt, 1, fixed_point_position);
78 Tensor var = create_tensor<Tensor>(shape1, dt, 1, fixed_point_position);
79 Tensor beta = create_tensor<Tensor>(shape1, dt, 1, fixed_point_position);
80 Tensor gamma = create_tensor<Tensor>(shape1, dt, 1, fixed_point_position);
81
82 // Create and Configure function
83 NEBatchNormalizationLayer norm;
Michele Di Giorgio4d336302018-03-02 09:43:54 +000084 Tensor *beta_ptr = use_beta ? &beta : nullptr;
85 Tensor *gamma_ptr = use_gamma ? &gamma : nullptr;
86 norm.configure(&src, &dst, &mean, &var, beta_ptr, gamma_ptr, epsilon);
Sanghoon Lee96883782017-09-15 14:10:48 +010087
88 // Validate valid region
89 const ValidRegion valid_region = shape_to_valid_region(shape0);
90 validate(dst.info()->valid_region(), valid_region);
91}
92
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +000093// *INDENT-OFF*
94// clang-format off
Giorgio Arena11674872018-02-07 15:38:12 +000095DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(zip(
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +000096 framework::dataset::make("InputInfo", { TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
97 TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
98 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32), // Mismatching data types
99 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32), // Mismatching data types
100 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32), // Invalid mean/var/beta/gamma shape
101 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 2), // Mismatching fixed point position
Giorgio Arena11674872018-02-07 15:38:12 +0000102 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 2), // Fused activation with fixed point not supported
103 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32), // Fused activation's a < b
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +0000104 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 2),
105 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 2),
106 }),
107 framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
108 TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
109 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
110 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F16),
111 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
112 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 3),
Giorgio Arena11674872018-02-07 15:38:12 +0000113 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
114 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 3),
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +0000115 TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QS8, 2),
116 TensorInfo(),
117 })),
118 framework::dataset::make("MVBGInfo",{ TensorInfo(TensorShape(2U), 1, DataType::F32),
119 TensorInfo(TensorShape(2U), 1, DataType::F32),
120 TensorInfo(TensorShape(2U), 1, DataType::F16),
121 TensorInfo(TensorShape(2U), 1, DataType::F32),
122 TensorInfo(TensorShape(5U), 1, DataType::F32),
123 TensorInfo(TensorShape(2U), 1, DataType::QS8, 2),
124 TensorInfo(TensorShape(2U), 1, DataType::QS8, 2),
Giorgio Arena11674872018-02-07 15:38:12 +0000125 TensorInfo(TensorShape(2U), 1, DataType::F32),
126 TensorInfo(TensorShape(2U), 1, DataType::QS8, 2),
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +0000127 TensorInfo(TensorShape(2U), 1, DataType::QS8, 2),
128 })),
Giorgio Arena11674872018-02-07 15:38:12 +0000129 framework::dataset::make("ActivationLayerInfo",{ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
130 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
131 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f),
132 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f),
133 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f),
134 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f, 2.f),
135 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f, 2.f),
136 ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 2.f, 6.f),
137 ActivationLayerInfo(),
138 ActivationLayerInfo(),
139 })),
140 framework::dataset::make("Expected", { true, false, false, false, false, false, false, false, true, true})),
141 input_info, output_info, mvbg_info, act_info, expected)
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +0000142{
143 const auto &mean_info = mvbg_info;
144 const auto &var_info = mvbg_info;
145 const auto &beta_info = mvbg_info;
146 const auto &gamma_info = mvbg_info;
147 bool has_error = bool(NEBatchNormalizationLayer::validate(
148 &input_info.clone()->set_is_resizable(false), output_info.total_size() ? &output_info.clone()->set_is_resizable(false) : nullptr,
149 &mean_info.clone()->set_is_resizable(false), &var_info.clone()->set_is_resizable(false),
Giorgio Arena11674872018-02-07 15:38:12 +0000150 &beta_info.clone()->set_is_resizable(false), &gamma_info.clone()->set_is_resizable(false), 1.f, act_info));
Ioan-Cristian Szabo303be902017-11-27 16:31:10 +0000151 ARM_COMPUTE_EXPECT(has_error == expected, framework::LogLevel::ERRORS);
152}
153// clang-format on
154// *INDENT-ON*
155
Sanghoon Lee96883782017-09-15 14:10:48 +0100156TEST_SUITE(Float)
Michele Di Giorgio4d336302018-03-02 09:43:54 +0000157FIXTURE_DATA_TEST_CASE(Random, NEBatchNormalizationLayerFixture<float>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::RandomBatchNormalizationLayerDataset(),
158 combine(framework::dataset::make("UseBeta", { false, true }),
159 framework::dataset::make("UseGamma", { false, true }))),
Giorgio Arena11674872018-02-07 15:38:12 +0000160 act_infos),
Sanghoon Lee96883782017-09-15 14:10:48 +0100161 framework::dataset::make("DataType", DataType::F32)))
162{
163 // Validate output
164 validate(Accessor(_target), _reference, tolerance_f32, 0);
165}
166TEST_SUITE_END()
167
Ioan-Cristian Szabo5edbd1c2017-11-13 13:34:08 +0000168#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
Sanghoon Lee96883782017-09-15 14:10:48 +0100169TEST_SUITE(Float16)
Michele Di Giorgio4d336302018-03-02 09:43:54 +0000170FIXTURE_DATA_TEST_CASE(Random, NEBatchNormalizationLayerFixture<half>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::RandomBatchNormalizationLayerDataset(),
171 combine(framework::dataset::make("UseBeta", { false, true }),
172 framework::dataset::make("UseGamma", { false, true }))),
Georgios Pinitas57c033b2018-02-15 12:29:44 +0000173 framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
Sanghoon Lee96883782017-09-15 14:10:48 +0100174 framework::dataset::make("DataType", DataType::F16)))
175{
176 // Validate output
177 validate(Accessor(_target), _reference, tolerance_f16, 0);
178}
179TEST_SUITE_END()
Ioan-Cristian Szabo5edbd1c2017-11-13 13:34:08 +0000180#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
Sanghoon Lee96883782017-09-15 14:10:48 +0100181
TEST_SUITE(Quantized)
// Fixed point fixture: in addition to the random dataset it sweeps the number
// of fractional bits ("FractionalBits"). Beta/gamma are disabled and no
// activation is fused in these tests.
template <typename T>
using NEBatchNormalizationLayerFixedPointFixture = BatchNormalizationLayerValidationFixedPointFixture<Tensor, Accessor, NEBatchNormalizationLayer, T>;

TEST_SUITE(QS8)
// QS8: fractional bits swept over [1, 6)
FIXTURE_DATA_TEST_CASE(Random, NEBatchNormalizationLayerFixedPointFixture<int8_t>, framework::DatasetMode::PRECOMMIT,
                       combine(combine(combine(combine(combine(datasets::RandomBatchNormalizationLayerDataset(),
                                                               framework::dataset::make("UseBeta", false)),
                                                       framework::dataset::make("UseGamma", false)),
                                               framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
                                       framework::dataset::make("DataType", DataType::QS8)),
                               framework::dataset::make("FractionalBits", 1, 6)))
{
    // Validate output against the reference within the QS8 tolerance
    validate(Accessor(_target), _reference, tolerance_qs8, 0);
}
TEST_SUITE_END()

TEST_SUITE(QS16)
// QS16: fractional bits swept over [1, 14)
FIXTURE_DATA_TEST_CASE(Random, NEBatchNormalizationLayerFixedPointFixture<int16_t>, framework::DatasetMode::PRECOMMIT,
                       combine(combine(combine(combine(combine(datasets::RandomBatchNormalizationLayerDataset(),
                                                               framework::dataset::make("UseBeta", false)),
                                                       framework::dataset::make("UseGamma", false)),
                                               framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
                                       framework::dataset::make("DataType", DataType::QS16)),
                               framework::dataset::make("FractionalBits", 1, 14)))
{
    // Validate output against the reference within the QS16 tolerance
    validate(Accessor(_target), _reference, tolerance_qs16, 0);
}
TEST_SUITE_END()

TEST_SUITE_END()
215
216TEST_SUITE_END()
217TEST_SUITE_END()
218} // namespace validation
219} // namespace test
220} // namespace arm_compute