//
// Copyright © 2019 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "InstanceNormalizationEndToEndTestImpl.hpp"
#include "EndToEndTestImpl.hpp"
#include "ResolveType.hpp"

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/DataLayoutIndexed.hpp>

#include <armnn/INetwork.hpp>

#include <armnnTestUtils/DataLayoutUtils.hpp>

#include <TestUtils.hpp>

#include <doctest/doctest.h>

namespace
{

template<armnn::DataType DataType>
armnn::INetworkPtr CreateInstanceNormalizationNetwork(const armnn::TensorShape& inputShape,
                                                      const armnn::TensorShape& outputShape,
                                                      const armnn::DataLayout dataLayout,
                                                      const float gamma,
                                                      const float beta,
                                                      const float eps,
                                                      const float qScale = 1.0f,
                                                      const int32_t qOffset = 0)
{
    using namespace armnn;

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);

    // Configure the instance normalization parameters.
    InstanceNormalizationDescriptor instanceNormalizationDesc;
    instanceNormalizationDesc.m_Gamma = gamma;
    instanceNormalizationDesc.m_Beta = beta;
    instanceNormalizationDesc.m_Eps = eps;
    instanceNormalizationDesc.m_DataLayout = dataLayout;

    // Connect the layers: input -> instanceNormalization -> output.
    IConnectableLayer* instanceNormalization = net->AddInstanceNormalizationLayer(instanceNormalizationDesc,
                                                                                  "InstanceNormalization");
    IConnectableLayer* input = net->AddInputLayer(0, "input");
    Connect(input, instanceNormalization, inputTensorInfo, 0, 0);

    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);
    IConnectableLayer* output = net->AddOutputLayer(0, "output");
    Connect(instanceNormalization, output, outputTensorInfo, 0, 0);

    return net;
}

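// Builds the single-layer instance normalization network above, runs it on the given backends
// and compares the result against the supplied reference data. The reference input/output
// tensors are authored in NHWC; when the requested layout is NCHW both are permuted before
// the network is constructed, so the same reference data serves both layouts.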
void InstanceNormalizationEndToEnd(const std::vector<armnn::BackendId>& backends,
                                   const armnn::DataLayout& dataLayout,
                                   armnn::TensorInfo& inputTensorInfo,
                                   armnn::TensorInfo& outputTensorInfo,
                                   std::vector<float>& inputData,
                                   std::vector<float>& expectedOutputData,
                                   const float gamma,
                                   const float beta,
                                   const float eps)
{
    using namespace armnn;

    if (dataLayout == DataLayout::NCHW)
    {
        PermuteTensorNhwcToNchw<float>(inputTensorInfo, inputData);
        PermuteTensorNhwcToNchw<float>(outputTensorInfo, expectedOutputData);
    }

    // Builds up the structure of the network.
    INetworkPtr net = CreateInstanceNormalizationNetwork<DataType::Float32>(inputTensorInfo.GetShape(),
                                                                            outputTensorInfo.GetShape(),
                                                                            dataLayout,
                                                                            gamma,
                                                                            beta,
                                                                            eps);

    CHECK(net);

    std::map<int, std::vector<float>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<float>> expectedOutputTensorData = { { 0, expectedOutputData } };

    EndToEndLayerTestImpl<DataType::Float32, DataType::Float32>(std::move(net),
                                                                inputTensorData,
                                                                expectedOutputTensorData,
                                                                backends);
}

} // anonymous namespace

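// The expected values in the tests below follow the usual instance normalization definition,
// applied per batch element and per channel over the spatial (H x W) positions:
//
//     out = gamma * (in - mean) / sqrt(variance + eps) + beta
//
// For example, in batch 0 the second channel holds { 1, 2, 2, 4 }, giving mean = 2.25 and
// variance = 1.1875, so its first element normalizes to
// (1 - 2.25) / sqrt(1.1875 + 0.0001) ≈ -1.1470304 (with gamma = 1 and beta = 0).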
void InstanceNormalizationNhwcEndToEndTest1(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float eps = 0.0001f;
    const float beta = 0.0f;
    const float gamma = 1.0f;

    TensorShape inputShape{2, 2, 2, 2};
    TensorInfo inputTensorInfo(inputShape, DataType::Float32, 0.0f, 0, true);

    TensorShape outputShape{2, 2, 2, 2};
    TensorInfo outputTensorInfo(outputShape, DataType::Float32);

    std::vector<float> inputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, 1.f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, 2.f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, 2.f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 4.f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            1.f, -1.f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -1.f, 2.f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -1.f, -2.f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            1.f, 4.f
        });

    std::vector<float> expectedOutputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, -1.1470304f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, -0.22940612f,
            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, -0.22940612f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 1.6058424f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            0.99995005f, -0.7337929f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -0.99995005f, 0.52413774f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -0.99995005f, -1.1531031f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            0.99995005f, 1.3627582f
        });

    InstanceNormalizationEndToEnd(defaultBackends,
                                  DataLayout::NHWC,
                                  inputTensorInfo,
                                  outputTensorInfo,
                                  inputData,
                                  expectedOutputData,
                                  gamma,
                                  beta,
                                  eps);
}

void InstanceNormalizationNchwEndToEndTest1(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float eps = 0.0001f;
    const float beta = 0.0f;
    const float gamma = 1.0f;

    TensorShape inputShape{2, 2, 2, 2};
    TensorInfo inputTensorInfo(inputShape, DataType::Float32, 0.0f, 0, true);

    TensorShape outputShape{2, 2, 2, 2};
    TensorInfo outputTensorInfo(outputShape, DataType::Float32);

    std::vector<float> inputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, 1.f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, 2.f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, 2.f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 4.f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            1.f, -1.f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -1.f, 2.f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -1.f, -2.f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            1.f, 4.f
        });

    std::vector<float> expectedOutputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, -1.1470304f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, -0.22940612f,
            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, -0.22940612f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 1.6058424f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            0.99995005f, -0.7337929f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -0.99995005f, 0.52413774f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -0.99995005f, -1.1531031f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            0.99995005f, 1.3627582f
        });

    InstanceNormalizationEndToEnd(defaultBackends,
                                  DataLayout::NCHW,
                                  inputTensorInfo,
                                  outputTensorInfo,
                                  inputData,
                                  expectedOutputData,
                                  gamma,
                                  beta,
                                  eps);
}

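// The "Test2" variants reuse the input data above but apply gamma = 2 and beta = 10,
// so each expected value is simply 2 * normalized + 10 (e.g. 2 * -1.1470304 + 10 ≈ 7.7059393).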
void InstanceNormalizationNhwcEndToEndTest2(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float eps = 0.0001f;
    const float beta = 10.0f;
    const float gamma = 2.0f;

    TensorShape inputShape{2, 2, 2, 2};
    TensorShape outputShape{2, 2, 2, 2};

    TensorInfo outputTensorInfo(outputShape, DataType::Float32);
    TensorInfo inputTensorInfo(inputShape, DataType::Float32, 0.0f, 0, true);

    std::vector<float> inputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, 1.f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, 2.f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, 2.f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 4.f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            1.f, -1.f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -1.f, 2.f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -1.f, -2.f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            1.f, 4.f
        });

    std::vector<float> expectedOutputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            10.f, 7.7059393f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            10.f, 9.541187f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            10.f, 9.541187f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            10.f, 13.211685f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            11.9999f, 8.532414f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            8.0001f, 11.048275f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            8.0001f, 7.693794f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            11.9999f, 12.725516f
        });

    InstanceNormalizationEndToEnd(defaultBackends,
                                  DataLayout::NHWC,
                                  inputTensorInfo,
                                  outputTensorInfo,
                                  inputData,
                                  expectedOutputData,
                                  gamma,
                                  beta,
                                  eps);
}

void InstanceNormalizationNchwEndToEndTest2(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float eps = 0.0001f;
    const float beta = 10.0f;
    const float gamma = 2.0f;

    TensorShape inputShape{2, 2, 2, 2};
    TensorShape outputShape{2, 2, 2, 2};

    TensorInfo outputTensorInfo(outputShape, DataType::Float32);
    TensorInfo inputTensorInfo(inputShape, DataType::Float32, 0.0f, 0, true);

    std::vector<float> inputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            0.f, 1.f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            0.f, 2.f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            0.f, 2.f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            0.f, 4.f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            1.f, -1.f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            -1.f, 2.f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            -1.f, -2.f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            1.f, 4.f
        });

    std::vector<float> expectedOutputData = std::vector<float>(
        {
            // Batch 0, Height 0, Width 0 x Channel (2)
            10.f, 7.7059393f,
            // Batch 0, Height 0, Width 1 x Channel (2)
            10.f, 9.541187f,

            // Batch 0, Height 1, Width 0 x Channel (2)
            10.f, 9.541187f,
            // Batch 0, Height 1, Width 1 x Channel (2)
            10.f, 13.211685f,

            // Batch 1, Height 0, Width 0 x Channel (2)
            11.9999f, 8.532414f,
            // Batch 1, Height 0, Width 1 x Channel (2)
            8.0001f, 11.048275f,

            // Batch 1, Height 1, Width 0 x Channel (2)
            8.0001f, 7.693794f,
            // Batch 1, Height 1, Width 1 x Channel (2)
            11.9999f, 12.725516f
        });

    InstanceNormalizationEndToEnd(defaultBackends,
                                  DataLayout::NCHW,
                                  inputTensorInfo,
                                  outputTensorInfo,
                                  inputData,
                                  expectedOutputData,
                                  gamma,
                                  beta,
                                  eps);
}