//
// Copyright © 2022-2024 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/INetwork.hpp>
#include <armnn/Types.hpp>

#include <CommonTestUtils.hpp>
#include <ResolveType.hpp>

#include <doctest/doctest.h>

namespace
{

using namespace armnn;

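// Builds a single-layer Pooling2d network: 3x3 pool, stride 1, padding of 1 on every edge, NHWC layout.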
template<typename armnn::DataType DataType>
armnn::INetworkPtr CreatePooling2dNetwork(const armnn::TensorShape& inputShape,
                                          const armnn::TensorShape& outputShape,
                                          PaddingMethod padMethod = PaddingMethod::Exclude,
                                          PoolingAlgorithm poolAlg = PoolingAlgorithm::Max,
                                          const float qScale = 1.0f,
                                          const int32_t qOffset = 0)
{
    INetworkPtr network(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);
    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset, true);

    Pooling2dDescriptor descriptor;
    descriptor.m_PoolType = poolAlg;
    descriptor.m_PoolWidth = descriptor.m_PoolHeight = 3;
    descriptor.m_StrideX = descriptor.m_StrideY = 1;
    descriptor.m_PadLeft = 1;
    descriptor.m_PadRight = 1;
    descriptor.m_PadTop = 1;
    descriptor.m_PadBottom = 1;
    descriptor.m_PaddingMethod = padMethod;
    descriptor.m_DataLayout = DataLayout::NHWC;

    IConnectableLayer* pool = network->AddPooling2dLayer(descriptor, "pool");
    IConnectableLayer* input = network->AddInputLayer(0, "input");
    IConnectableLayer* output = network->AddOutputLayer(0, "output");

    Connect(input, pool, inputTensorInfo, 0, 0);
    Connect(pool, output, outputTensorInfo, 0, 0);

    return network;
}

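// Runs a single 3x3 max-pooling layer end to end on a 1x3x3x1 input and checks the output against reference values.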
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void MaxPool2dEndToEnd(const std::vector<armnn::BackendId>& backends,
                       PaddingMethod padMethod = PaddingMethod::Exclude)
{
    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreatePooling2dNetwork<ArmnnType>(inputShape, outputShape, padMethod);

    CHECK(network);

    std::vector<T> inputData{ 1, 2, 3,
                              4, 5, 6,
                              7, 8, 9 };
    std::vector<T> expectedOutput{ 5, 6, 6,
                                   8, 9, 9,
                                   8, 9, 9 };

    std::map<int, std::vector<T>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<T>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network), inputTensorData, expectedOutputData, backends);
}

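// Float16 variant of the single-layer max-pooling end-to-end test.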
template<armnn::DataType ArmnnType>
void MaxPool2dEndToEndFloat16(const std::vector<armnn::BackendId>& backends,
                              PaddingMethod padMethod = PaddingMethod::Exclude)
{
    using namespace half_float::literal;
    using Half = half_float::half;

    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreatePooling2dNetwork<ArmnnType>(inputShape, outputShape, padMethod);
    CHECK(network);

    std::vector<Half> inputData{ 1._h, 2._h, 3._h,
                                 4._h, 5._h, 6._h,
                                 7._h, 8._h, 9._h };
    std::vector<Half> expectedOutput{ 5._h, 6._h, 6._h,
                                      8._h, 9._h, 9._h,
                                      8._h, 9._h, 9._h };

    std::map<int, std::vector<Half>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<Half>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network), inputTensorData, expectedOutputData, backends);
}

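// Runs a single 3x3 average-pooling layer end to end; the expected values depend on whether
// padded elements are excluded from the average (PaddingMethod::Exclude) or counted as zeros.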
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void AvgPool2dEndToEnd(const std::vector<armnn::BackendId>& backends,
                       PaddingMethod padMethod = PaddingMethod::Exclude)
{
    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreatePooling2dNetwork<ArmnnType>(
        inputShape, outputShape, padMethod, PoolingAlgorithm::Average);
    CHECK(network);

    std::vector<T> inputData{ 1, 2, 3,
                              4, 5, 6,
                              7, 8, 9 };
    std::vector<T> expectedOutput;
    if (padMethod == PaddingMethod::Exclude)
    {
        expectedOutput = { 3.f , 3.5f, 4.f ,
                           4.5f, 5.f , 5.5f,
                           6.f , 6.5f, 7.f };
    }
    else
    {
        expectedOutput = { 1.33333f, 2.33333f, 1.77778f,
                           3.f     , 5.f     , 3.66667f,
                           2.66667f, 4.33333f, 3.11111f };
    }

    std::map<int, std::vector<T>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<T>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),
                                                inputTensorData,
                                                expectedOutputData,
                                                backends,
                                                0.00001f);
}

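// Float16 variant of the single-layer average-pooling end-to-end test.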
template<armnn::DataType ArmnnType>
void AvgPool2dEndToEndFloat16(const std::vector<armnn::BackendId>& backends,
                              PaddingMethod padMethod = PaddingMethod::Exclude)
{
    using namespace half_float::literal;
    using Half = half_float::half;

    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreatePooling2dNetwork<ArmnnType>(
        inputShape, outputShape, padMethod, PoolingAlgorithm::Average);
    CHECK(network);

    std::vector<Half> inputData{ 1._h, 2._h, 3._h,
                                 4._h, 5._h, 6._h,
                                 7._h, 8._h, 9._h };
    std::vector<Half> expectedOutput;
    if (padMethod == PaddingMethod::Exclude)
    {
        expectedOutput = { 3._h , 3.5_h, 4._h ,
                           4.5_h, 5._h , 5.5_h,
                           6._h , 6.5_h, 7._h };
    }
    else
    {
        expectedOutput = { 1.33333_h, 2.33333_h, 1.77778_h,
                           3._h     , 5._h     , 3.66667_h,
                           2.66667_h, 4.33333_h, 3.11111_h };
    }

    std::map<int, std::vector<Half>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<Half>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),
                                                inputTensorData,
                                                expectedOutputData,
                                                backends,
                                                0.00001f);
}

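// Builds a network with two identical 3x3 Pooling2d layers connected in sequence.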
template<typename armnn::DataType DataType>
armnn::INetworkPtr CreateTwoLayerPooling2dNetwork(const armnn::TensorShape& inputShape,
                                                  const armnn::TensorShape& outputShape,
                                                  PaddingMethod padMethod = PaddingMethod::Exclude,
                                                  PoolingAlgorithm poolAlg = PoolingAlgorithm::Max,
                                                  const float qScale = 1.0f,
                                                  const int32_t qOffset = 0)
{
    INetworkPtr network(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);
    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset, true);

    Pooling2dDescriptor descriptor;
    descriptor.m_PoolType = poolAlg;
    descriptor.m_PoolWidth = descriptor.m_PoolHeight = 3;
    descriptor.m_StrideX = descriptor.m_StrideY = 1;
    descriptor.m_PadLeft = 1;
    descriptor.m_PadRight = 1;
    descriptor.m_PadTop = 1;
    descriptor.m_PadBottom = 1;
    descriptor.m_PaddingMethod = padMethod;
    descriptor.m_DataLayout = DataLayout::NHWC;

    IConnectableLayer* input = network->AddInputLayer(0, "input");
    IConnectableLayer* pool1 = network->AddPooling2dLayer(descriptor, "pool_1");
    IConnectableLayer* pool2 = network->AddPooling2dLayer(descriptor, "pool_2");
    IConnectableLayer* output = network->AddOutputLayer(0, "output");

    Connect(input, pool1, inputTensorInfo, 0, 0);
    Connect(pool1, pool2, inputTensorInfo, 0, 0);
    Connect(pool2, output, outputTensorInfo, 0, 0);

    return network;
}

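// Builds a network with three identical 3x3 Pooling2d layers connected in sequence.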
template<typename armnn::DataType DataType>
armnn::INetworkPtr CreateThreeLayerPooling2dNetwork(const armnn::TensorShape& inputShape,
                                                    const armnn::TensorShape& outputShape,
                                                    PaddingMethod padMethod = PaddingMethod::Exclude,
                                                    PoolingAlgorithm poolAlg = PoolingAlgorithm::Max,
                                                    const float qScale = 1.0f,
                                                    const int32_t qOffset = 0)
{
    INetworkPtr network(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);
    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset, true);

    Pooling2dDescriptor descriptor;
    descriptor.m_PoolType = poolAlg;
    descriptor.m_PoolWidth = descriptor.m_PoolHeight = 3;
    descriptor.m_StrideX = descriptor.m_StrideY = 1;
    descriptor.m_PadLeft = 1;
    descriptor.m_PadRight = 1;
    descriptor.m_PadTop = 1;
    descriptor.m_PadBottom = 1;
    descriptor.m_PaddingMethod = padMethod;
    descriptor.m_DataLayout = DataLayout::NHWC;

    IConnectableLayer* input = network->AddInputLayer(0, "input");
    IConnectableLayer* pool1 = network->AddPooling2dLayer(descriptor, "pool_1");
    IConnectableLayer* pool2 = network->AddPooling2dLayer(descriptor, "pool_2");
    IConnectableLayer* pool3 = network->AddPooling2dLayer(descriptor, "pool_3");
    IConnectableLayer* output = network->AddOutputLayer(0, "output");

    Connect(input, pool1, inputTensorInfo, 0, 0);
    Connect(pool1, pool2, inputTensorInfo, 0, 0);
    Connect(pool2, pool3, inputTensorInfo, 0, 0);
    Connect(pool3, output, outputTensorInfo, 0, 0);

    return network;
}

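// Runs two chained 3x3 max-pooling layers end to end; after the second pass every element equals the input maximum.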
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void MaxPool2dTwoLayerEndToEnd(const std::vector<armnn::BackendId>& backends,
                               PaddingMethod padMethod = PaddingMethod::Exclude)
{
    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreateTwoLayerPooling2dNetwork<ArmnnType>(inputShape, outputShape, padMethod);

    CHECK(network);

    std::vector<T> inputData{ 1, 2, 3,
                              4, 5, 6,
                              7, 8, 9 };
    std::vector<T> expectedOutput{ 9, 9, 9,
                                   9, 9, 9,
                                   9, 9, 9 };

    std::map<int, std::vector<T>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<T>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network), inputTensorData, expectedOutputData, backends);
}

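// Runs three chained 3x3 max-pooling layers end to end; the result is the same saturated output as the two-layer case.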
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void MaxPool2dThreeLayerEndToEnd(const std::vector<armnn::BackendId>& backends,
                                 PaddingMethod padMethod = PaddingMethod::Exclude)
{
    const TensorShape& inputShape = { 1, 3, 3, 1 };
    const TensorShape& outputShape = { 1, 3, 3, 1 };

    INetworkPtr network = CreateThreeLayerPooling2dNetwork<ArmnnType>(inputShape, outputShape, padMethod);

    CHECK(network);

    std::vector<T> inputData{ 1, 2, 3,
                              4, 5, 6,
                              7, 8, 9 };
    std::vector<T> expectedOutput{ 9, 9, 9,
                                   9, 9, 9,
                                   9, 9, 9 };

    std::map<int, std::vector<T>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<T>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network), inputTensorData, expectedOutputData, backends);
}

} // anonymous namespace
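
// Example usage (a sketch only, not part of the original helpers): backend test suites
// would typically drive these templates from doctest cases, passing the backend under
// test. The suite/case names and the CpuRef backend below are illustrative choices.
//
// TEST_SUITE("Pooling2dEndToEnd")
// {
//     TEST_CASE("MaxPool2dFloat32")
//     {
//         MaxPool2dEndToEnd<armnn::DataType::Float32>({ armnn::Compute::CpuRef });
//     }
//
//     TEST_CASE("AvgPool2dFloat32IgnoreValue")
//     {
//         AvgPool2dEndToEnd<armnn::DataType::Float32>({ armnn::Compute::CpuRef },
//                                                     armnn::PaddingMethod::IgnoreValue);
//     }
// }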