/*
 * Copyright (c) 2017-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/CLTensorAllocator.h"
#include "arm_compute/runtime/CL/functions/CLActivationLayer.h"
#include "arm_compute/runtime/RuntimeContext.h"
#include "tests/CL/CLAccessor.h"
#include "tests/PaddingCalculator.h"
#include "tests/datasets/ActivationFunctionsDataset.h"
#include "tests/datasets/ShapeDatasets.h"
#include "tests/framework/Asserts.h"
#include "tests/framework/Macros.h"
#include "tests/framework/datasets/Datasets.h"
#include "tests/validation/Validation.h"
#include "tests/validation/fixtures/ActivationLayerFixture.h"

namespace arm_compute
{
namespace test
{
namespace validation
{
namespace
{
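/** Absolute tolerance for QSYMM16 results: the output may differ from the reference by one unit of the quantized representation. */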
constexpr AbsoluteTolerance<float> tolerance_qsymm16(1.f);

/** Define tolerance of the activation layer.
 *
 * @param[in] activation The activation function used.
 * @param[in] data_type  Data type.
 *
 * @return Tolerance depending on the activation function.
 */
AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activation, DataType data_type)
{
    constexpr float epsilon = 1e-6f;

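    // Half-precision (F16) carries less precision than F32, so the non-linear functions below get looser tolerances than the F32 epsilon.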
    switch(activation)
    {
        case ActivationLayerInfo::ActivationFunction::LINEAR:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.2f : epsilon);
        case ActivationLayerInfo::ActivationFunction::SQUARE:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
        case ActivationLayerInfo::ActivationFunction::LOGISTIC:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
        case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
        case ActivationLayerInfo::ActivationFunction::ELU:
        case ActivationLayerInfo::ActivationFunction::SQRT:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
        case ActivationLayerInfo::ActivationFunction::TANH:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
        case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : epsilon);
        default:
            return AbsoluteTolerance<float>(epsilon);
    }
}

/** CNN data types */
const auto CNNDataTypes = framework::dataset::make("DataType",
{
    DataType::F16,
    DataType::F32
});

/** Input data sets. */
const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
} // namespace

TEST_SUITE(CL)
TEST_SUITE(ActivationLayer)

DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), CNNDataTypes), framework::dataset::make("InPlace", { false, true })),
               shape, data_type, in_place)
{
    // Create context
    auto ctx = parameters->get_ctx<CLTensor>();

    // Create tensors
    CLTensor src = create_tensor<CLTensor>(shape, data_type, 1, QuantizationInfo(), DataLayout::NCHW, ctx);
    CLTensor dst = create_tensor<CLTensor>(shape, data_type, 1, QuantizationInfo(), DataLayout::NCHW, ctx);

    ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
    ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);

    // Create and configure function
    CLActivationLayer act_layer(ctx);

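    // In-place execution writes the result back into the source tensor, so the destination is passed as nullptr.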
    if(in_place)
    {
        act_layer.configure(&src, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
    }
    else
    {
        act_layer.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
    }

    // Validate valid region
    const ValidRegion valid_region = shape_to_valid_region(shape);
    validate(src.info()->valid_region(), valid_region);

    if(!in_place)
    {
        validate(dst.info()->valid_region(), valid_region);
    }

    // Validate padding
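    // The step is the number of elements that fit in 16 bytes, which is the access width the padding calculation assumes.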
    const int         step    = 16 / arm_compute::data_size_from_type(data_type);
    const PaddingSize padding = PaddingCalculator(shape.x(), step).required_padding();
    validate(src.info()->padding(), padding);

    if(!in_place)
    {
        validate(dst.info()->padding(), padding);
    }
}

// *INDENT-OFF*
// clang-format off
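// Each column below pairs an input/output TensorInfo and an ActivationLayerInfo with the result expected from CLActivationLayer::validate().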
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),     // Mismatching data types
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),     // Window shrink
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8),
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8), // Invalid quantization info
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),     // Mismatching shapes
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16), // Invalid activation function for QSYMM16
                                                     }),
               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8),
                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8),
                                                       TensorInfo(TensorShape(30U, 11U, 2U), 1, DataType::F32),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16, QuantizationInfo(1.f / 32768.f, 0)),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16, QuantizationInfo(1.f / 32768.f, 0)),
                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QSYMM16, QuantizationInfo(1.f / 32768.f, 0)),
                                                     })),
               framework::dataset::make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC),
                                                            ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::SQRT),
                                                          })),
               framework::dataset::make("Expected", { false, false, true, true, false, false, true, true, false })),
               input_info, output_info, act_info, expected)
{
    ARM_COMPUTE_EXPECT(bool(CLActivationLayer::validate(&input_info.clone()->set_is_resizable(false), (output_info.total_size() == 0) ? nullptr : &output_info.clone()->set_is_resizable(false), act_info)) == expected, framework::LogLevel::ERRORS);
}
// clang-format on
// *INDENT-ON*

/** [CLActivationLayerFixture snippet] **/
template <typename T>
using CLActivationLayerFixture = ActivationValidationFixture<CLTensor, CLAccessor, CLActivationLayer, T>;
/** [CLActivationLayerFixture snippet] **/

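// The fixture runs CLActivationLayer over the dataset shapes and compares the CL output against the test suite's reference implementation.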
TEST_SUITE(Float)
TEST_SUITE(FP16)
/** [CLActivationLayer Test snippet] **/
FIXTURE_DATA_TEST_CASE(RunSmall, CLActivationLayerFixture<half>, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), ActivationDataset),
                                                                                                       framework::dataset::make("DataType", DataType::F16)))
{
    // Validate output
    validate(CLAccessor(_target), _reference, tolerance(_function, _data_type));
}
/** [CLActivationLayer Test snippet] **/
TEST_SUITE_END() // FP16

TEST_SUITE(FP32)
FIXTURE_DATA_TEST_CASE(RunSmall, CLActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), ActivationDataset),
                                                                                                        framework::dataset::make("DataType", DataType::F32)))
{
    // Validate output
    validate(CLAccessor(_target), _reference, tolerance(_function, _data_type));
}
TEST_SUITE_END() // FP32
TEST_SUITE_END() // Float

template <typename T>
using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<CLTensor, CLAccessor, CLActivationLayer, T>;

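// HARD_SWISH is only exercised for the 8-bit quantized types; the QSYMM16 dataset below uses the quantized activation functions without it.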
const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
                                                          concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));

const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
                                                           datasets::ActivationFunctionsQuantized()),
                                                  framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));

TEST_SUITE(Quantized)
TEST_SUITE(QASYMM8)
FIXTURE_DATA_TEST_CASE(RunSmall, CLActivationLayerQuantizedFixture<uint8_t>, framework::DatasetMode::ALL, combine(combine(combine(datasets::SmallShapes(), QuantizedActivationDataset8),
                                                                                                                   framework::dataset::make("DataType", DataType::QASYMM8)),
                                                                                                                   framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
{
    // Validate output
    validate(CLAccessor(_target), _reference, tolerance(_function, _data_type));
}
TEST_SUITE_END() // QASYMM8
TEST_SUITE(QASYMM8_SIGNED)
FIXTURE_DATA_TEST_CASE(RunSmall, CLActivationLayerQuantizedFixture<int8_t>, framework::DatasetMode::ALL, combine(combine(combine(datasets::SmallShapes(), QuantizedActivationDataset8),
                                                                                                                  framework::dataset::make("DataType", DataType::QASYMM8_SIGNED)),
                                                                                                                  framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 10.0f) })))
{
    // Validate output
    validate(CLAccessor(_target), _reference, tolerance(_function, _data_type));
}
TEST_SUITE_END() // QASYMM8_SIGNED
TEST_SUITE(QSYMM16)
FIXTURE_DATA_TEST_CASE(RunSmall, CLActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::ALL, combine(combine(combine(datasets::SmallShapes(), QuantizedActivationDataset16),
                                                                                                                   framework::dataset::make("DataType", DataType::QSYMM16)),
                                                                                                                   framework::dataset::make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0) })))
{
    // Validate output
    validate(CLAccessor(_target), _reference, tolerance_qsymm16);
}
TEST_SUITE_END() // QSYMM16
TEST_SUITE_END() // Quantized

TEST_SUITE_END() // ActivationLayer
TEST_SUITE_END() // CL
} // namespace validation
} // namespace test
} // namespace arm_compute