//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "WorkloadTestUtils.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/TypesUtils.hpp>

#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/IBackendInternal.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <test/TensorHelpers.hpp>

#include <iostream>
#include <sstream>

namespace
{

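// Runs a Debug workload over the given input, capturing the text it prints to
// std::cout and checking it against expectedStringOutput. The actual tensor
// output is copied into the returned LayerTestResult for comparison by the caller.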
template<typename T, std::size_t Dim>
LayerTestResult<T, Dim> DebugTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::TensorInfo& inputTensorInfo,
    armnn::TensorInfo& outputTensorInfo,
    std::vector<float>& inputData,
    std::vector<float>& outputExpectedData,
    armnn::DebugQueueDescriptor descriptor,
    const std::string expectedStringOutput,
    const float qScale = 1.0f,
    const int32_t qOffset = 0)
{
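    // For quantized data types, apply the test's scale and offset to both tensors.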
    if(armnn::IsQuantizedType<T>())
    {
        inputTensorInfo.SetQuantizationScale(qScale);
        inputTensorInfo.SetQuantizationOffset(qOffset);

        outputTensorInfo.SetQuantizationScale(qScale);
        outputTensorInfo.SetQuantizationOffset(qOffset);
    }

    boost::multi_array<T, Dim> input =
        MakeTensor<T, Dim>(inputTensorInfo, QuantizedVector<T>(qScale, qOffset, inputData));

    LayerTestResult<T, Dim> ret(outputTensorInfo);
    ret.outputExpected =
        MakeTensor<T, Dim>(outputTensorInfo, QuantizedVector<T>(qScale, qOffset, outputExpectedData));

    std::unique_ptr<armnn::ITensorHandle> inputHandle =
        workloadFactory.CreateTensorHandle(inputTensorInfo);

    std::unique_ptr<armnn::ITensorHandle> outputHandle =
        workloadFactory.CreateTensorHandle(outputTensorInfo);

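    // Wire the tensor handles into the descriptor and workload info, then create the Debug workload.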
    armnn::WorkloadInfo info;
    AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateDebug(descriptor, info);

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.data());

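    // The Debug workload writes its output to std::cout; redirect std::cout into a
    // stringstream so the printed text can be compared against the expected string.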
    std::ostringstream oss;
    std::streambuf* coutStreambuf = std::cout.rdbuf();
    std::cout.rdbuf(oss.rdbuf());

    ExecuteWorkload(*workload, memoryManager);

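    // Restore the original std::cout buffer before making any test assertions.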
    std::cout.rdbuf(coutStreambuf);

    BOOST_TEST(oss.str() == expectedStringOutput);

    CopyDataFromITensorHandle(ret.output.data(), outputHandle.get());

    return ret;
}

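// The test cases below (4D, 3D, 2D and 1D) feed a small tensor through a Debug layer
// and check that the data passes through unchanged and is printed as the expected
// JSON-style string.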
template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 4> Debug4DTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = {1, 2, 2, 3};
    unsigned int outputShape[] = {1, 2, 2, 3};

    armnn::DebugQueueDescriptor desc;
    desc.m_Guid = 1;
    desc.m_LayerName = "TestOutput";
    desc.m_SlotIndex = 0;

    inputTensorInfo = armnn::TensorInfo(4, inputShape, ArmnnType);
    outputTensorInfo = armnn::TensorInfo(4, outputShape, ArmnnType);

    std::vector<float> input = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f,
        4.0f, 5.0f, 6.0f,
        7.0f, 8.0f, 9.0f,
        10.0f, 11.0f, 12.0f,
    });

    std::vector<float> outputExpected = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f,
        4.0f, 5.0f, 6.0f,
        7.0f, 8.0f, 9.0f,
        10.0f, 11.0f, 12.0f,
    });

    const std::string expectedStringOutput =
        "{ \"layerGuid\": 1,"
        " \"layerName\": \"TestOutput\","
        " \"outputSlot\": 0,"
        " \"shape\": [1, 2, 2, 3],"
        " \"min\": 1, \"max\": 12,"
        " \"data\": [[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]] }\n";

    return DebugTestImpl<T, 4>(workloadFactory,
                               memoryManager,
                               inputTensorInfo,
                               outputTensorInfo,
                               input,
                               outputExpected,
                               desc,
                               expectedStringOutput);
}

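// As Debug4DTest, but with a 3x3x1 tensor.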
template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 3> Debug3DTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = {3, 3, 1};
    unsigned int outputShape[] = {3, 3, 1};

    armnn::DebugQueueDescriptor desc;
    desc.m_Guid = 1;
    desc.m_LayerName = "TestOutput";
    desc.m_SlotIndex = 0;

    inputTensorInfo = armnn::TensorInfo(3, inputShape, ArmnnType);
    outputTensorInfo = armnn::TensorInfo(3, outputShape, ArmnnType);

    std::vector<float> input = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f,
        4.0f, 5.0f, 6.0f,
        7.0f, 8.0f, 9.0f,
    });

    std::vector<float> outputExpected = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f,
        4.0f, 5.0f, 6.0f,
        7.0f, 8.0f, 9.0f,
    });

    const std::string expectedStringOutput =
        "{ \"layerGuid\": 1,"
        " \"layerName\": \"TestOutput\","
        " \"outputSlot\": 0,"
        " \"shape\": [3, 3, 1],"
        " \"min\": 1, \"max\": 9,"
        " \"data\": [[[1], [2], [3]], [[4], [5], [6]], [[7], [8], [9]]] }\n";

    return DebugTestImpl<T, 3>(workloadFactory,
                               memoryManager,
                               inputTensorInfo,
                               outputTensorInfo,
                               input,
                               outputExpected,
                               desc,
                               expectedStringOutput);
}

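// As Debug4DTest, but with a 2x2 tensor.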
template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> Debug2DTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = {2, 2};
    unsigned int outputShape[] = {2, 2};

    armnn::DebugQueueDescriptor desc;
    desc.m_Guid = 1;
    desc.m_LayerName = "TestOutput";
    desc.m_SlotIndex = 0;

    inputTensorInfo = armnn::TensorInfo(2, inputShape, ArmnnType);
    outputTensorInfo = armnn::TensorInfo(2, outputShape, ArmnnType);

    std::vector<float> input = std::vector<float>(
    {
        1.0f, 2.0f,
        3.0f, 4.0f,
    });

    std::vector<float> outputExpected = std::vector<float>(
    {
        1.0f, 2.0f,
        3.0f, 4.0f,
    });

    const std::string expectedStringOutput =
        "{ \"layerGuid\": 1,"
        " \"layerName\": \"TestOutput\","
        " \"outputSlot\": 0,"
        " \"shape\": [2, 2],"
        " \"min\": 1, \"max\": 4,"
        " \"data\": [[1, 2], [3, 4]] }\n";

    return DebugTestImpl<T, 2>(workloadFactory,
                               memoryManager,
                               inputTensorInfo,
                               outputTensorInfo,
                               input,
                               outputExpected,
                               desc,
                               expectedStringOutput);
}

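// As Debug4DTest, but with a 4-element 1D tensor.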
template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 1> Debug1DTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = {4};
    unsigned int outputShape[] = {4};

    armnn::DebugQueueDescriptor desc;
    desc.m_Guid = 1;
    desc.m_LayerName = "TestOutput";
    desc.m_SlotIndex = 0;

    inputTensorInfo = armnn::TensorInfo(1, inputShape, ArmnnType);
    outputTensorInfo = armnn::TensorInfo(1, outputShape, ArmnnType);

    std::vector<float> input = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f, 4.0f,
    });

    std::vector<float> outputExpected = std::vector<float>(
    {
        1.0f, 2.0f, 3.0f, 4.0f,
    });

    const std::string expectedStringOutput =
        "{ \"layerGuid\": 1,"
        " \"layerName\": \"TestOutput\","
        " \"outputSlot\": 0,"
        " \"shape\": [4],"
        " \"min\": 1, \"max\": 4,"
        " \"data\": [1, 2, 3, 4] }\n";

    return DebugTestImpl<T, 1>(workloadFactory,
                               memoryManager,
                               inputTensorInfo,
                               outputTensorInfo,
                               input,
                               outputExpected,
                               desc,
                               expectedStringOutput);
}

} // anonymous namespace