/*
 * Copyright (c) 2017-2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/NEON/functions/NEActivationLayer.h"
#include "arm_compute/runtime/Tensor.h"
#include "arm_compute/runtime/TensorAllocator.h"
#include "tests/NEON/Accessor.h"
#include "tests/PaddingCalculator.h"
#include "tests/datasets/ActivationFunctionsDataset.h"
#include "tests/datasets/ShapeDatasets.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Macros.h"
#include "tests/framework/datasets/Datasets.h"
#include "tests/validation/Validation.h"
#include "tests/validation/fixtures/ActivationLayerFixture.h"

namespace arm_compute
{
namespace test
{
namespace validation
{
namespace
{
/** Define absolute tolerance of the activation layer.
 *
 * The non-linear activations LOGISTIC, SOFT_RELU, SQRT and TANH are validated with a
 * relaxed tolerance (larger for F16); all other activations must match the reference exactly.
 *
 * @param[in] data_type  The data type used.
 * @param[in] activation The activation function used.
 *
 * @return Tolerance depending on the activation function.
 */
AbsoluteTolerance<float> tolerance(DataType data_type, ActivationLayerInfo::ActivationFunction activation)
{
    switch(activation)
    {
        case ActivationLayerInfo::ActivationFunction::LOGISTIC:
        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
        case ActivationLayerInfo::ActivationFunction::SQRT:
        case ActivationLayerInfo::ActivationFunction::TANH:
            switch(data_type)
            {
                case DataType::F16:
                    return AbsoluteTolerance<float>(0.01f);
                default:
                    return AbsoluteTolerance<float>(0.00001f);
            }
            break;
        default:
            return AbsoluteTolerance<float>(0.f);
    }
}

/** CNN data types */
const auto CNNDataTypes = framework::dataset::make("DataType",
{
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
    DataType::F16,
#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
    DataType::F32,
});

/** Input data sets. */
const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
} // namespace

TEST_SUITE(NEON)
TEST_SUITE(ActivationLayer)

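// Configures the layer for every shape/data type, both in-place and out-of-place, and checks
// that the resulting valid region and padding of the tensors match the expected values.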
DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(concat(datasets::SmallShapes(), datasets::LargeShapes()), CNNDataTypes), framework::dataset::make("InPlace", { false, true })),
               shape, data_type, in_place)
{
    // Create tensors
    Tensor src = create_tensor<Tensor>(shape, data_type, 1);
    Tensor dst = create_tensor<Tensor>(shape, data_type, 1);

    ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
    ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);

    // Create and configure function
    NEActivationLayer act_layer;

    if(in_place)
    {
        act_layer.configure(&src, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
    }
    else
    {
        act_layer.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
    }

    // Validate valid region
    const ValidRegion valid_region = shape_to_valid_region(shape);
    validate(src.info()->valid_region(), valid_region);

    if(!in_place)
    {
        validate(dst.info()->valid_region(), valid_region);
    }

    // Validate padding
    const PaddingSize padding = PaddingCalculator(shape.x(), 16).required_padding();
    validate(src.info()->padding(), padding);

    if(!in_place)
    {
        validate(dst.info()->padding(), padding);
    }
}

// *INDENT-OFF*
// clang-format off
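// Each zipped row pairs an input/output TensorInfo and an activation with the result expected
// from NEActivationLayer::validate(): mismatching data types and mismatching shapes are rejected.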
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching shapes
                                                     }),
               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                     })),
               framework::dataset::make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                          })),
               framework::dataset::make("Expected", { false, true, false })),
               input_info, output_info, act_info, expected)
{
    bool is_valid = bool(NEActivationLayer::validate(&input_info.clone()->set_is_resizable(false), &output_info.clone()->set_is_resizable(false), act_info));
    ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
// *INDENT-ON*

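// Fixture that runs NEActivationLayer on the target and compares the result against the
// reference implementation for element type T.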
template <typename T>
using NEActivationLayerFixture = ActivationValidationFixture<Tensor, Accessor, NEActivationLayer, T>;

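// Floating-point tests; the FP16 suite is compiled only when the target provides FP16 vector arithmetic.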
TEST_SUITE(Float)
#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
TEST_SUITE(FP16)
FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<half>, framework::DatasetMode::PRECOMMIT, combine(combine(datasets::SmallShapes(), ActivationDataset),
                                                                                                            framework::dataset::make("DataType", DataType::F16)))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixture<half>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeShapes(), ActivationDataset),
                                                                                                          framework::dataset::make("DataType", DataType::F16)))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
TEST_SUITE_END()
#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */

TEST_SUITE(FP32)
FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<float>, framework::DatasetMode::PRECOMMIT, combine(combine(datasets::SmallShapes(), ActivationDataset),
                                                                                                             framework::dataset::make("DataType", DataType::F32)))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixture<float>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeShapes(), ActivationDataset),
                                                                                                           framework::dataset::make("DataType", DataType::F32)))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
TEST_SUITE_END()
TEST_SUITE_END()

template <typename T>
using NEActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;

/** Quantized input data sets. */
const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
                                                                                                  ActivationLayerInfo::ActivationFunction::RELU
                                                                                                });

const auto QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), QuantizedActivationFunctionsDataset),
                                                framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));

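// QASYMM8 tests run over the reduced activation set above with a fixed quantization of
// scale 0.1 and offset 128.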
TEST_SUITE(Quantized)
TEST_SUITE(QASYMM8)
FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<uint8_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallShapes(), QuantizedActivationDataset),
                                                                                                                        framework::dataset::make("DataType", DataType::QASYMM8)),
                                                                                                                        framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerQuantizedFixture<uint8_t>, framework::DatasetMode::NIGHTLY, combine(combine(combine(datasets::LargeShapes(), QuantizedActivationDataset),
                                                                                                                      framework::dataset::make("DataType", DataType::QASYMM8)),
                                                                                                                      framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
{
    // Validate output
    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
}
TEST_SUITE_END()
TEST_SUITE_END()

TEST_SUITE_END()
TEST_SUITE_END()
} // namespace validation
} // namespace test
} // namespace arm_compute