//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "AbsTestImpl.hpp"

#include <armnn/ArmNN.hpp>

#include <backendsCommon/test/DataTypeUtils.hpp>
#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <test/TensorHelpers.hpp>

#include <boost/core/ignore_unused.hpp>

#include <algorithm>
#include <cmath>

namespace
{

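// Builds an Abs workload for a rank-2 tensor, runs it, and returns the actual
// and expected outputs for comparison.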
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> Abs2dTestCommon(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::TensorInfo inputTensorInfo,
    const armnn::TensorInfo outputTensorInfo,
    const std::vector<float>& inputValues,
    const std::vector<float>& expectedOutputValues)
{
    boost::ignore_unused(memoryManager);
    auto inputTensor = MakeTensor<T, 2>(inputTensorInfo, ConvertToDataType<ArmnnType>(inputValues, inputTensorInfo));

    LayerTestResult<T, 2> result(outputTensorInfo);

    result.outputExpected = MakeTensor<T, 2>(outputTensorInfo,
                                             ConvertToDataType<ArmnnType>(expectedOutputValues, outputTensorInfo));

    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

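    // Describe the Abs workload: a single input and a single output with the given tensor infos.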
    armnn::AbsQueueDescriptor descriptor;

    armnn::WorkloadInfo info;

    AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateAbs(descriptor, info);

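    // Allocate the tensor handles, copy the input data in, run the workload and read the result back.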
    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), &inputTensor[0][0]);

    workload->PostAllocationConfigure();
    workload->Execute();

    CopyDataFromITensorHandle(&result.output[0][0], outputHandle.get());

    return result;
}

} // anonymous namespace

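// Checks Abs on a 2x2 tensor containing a mix of negative and positive values.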
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 2> Abs2dTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const armnn::TensorShape inputShape{ 2, 2 };
    const armnn::TensorShape outputShape{ 2, 2 };

    float qScale = 0.0625f;
    int32_t qOffset = 64;

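    // Symmetric 16-bit quantization requires a zero offset, so adjust the parameters for that type.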
    if (ArmnnType == armnn::DataType::QuantisedSymm16)
    {
        qScale = 0.1f;
        qOffset = 0;
    }

    armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(qScale);
    inputTensorInfo.SetQuantizationOffset(qOffset);

    armnn::TensorInfo outputTensorInfo(outputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(qScale);
    outputTensorInfo.SetQuantizationOffset(qOffset);

    std::vector<float> inputValues
    {
        -0.1f, 0.2f,
        0.3f, -0.4f
    };

    // Calculate output values for input.
    auto f = [](float value)
    {
        return std::abs(value);
    };
    std::vector<float> expectedOutputValues(inputValues.size());
    std::transform(inputValues.begin(), inputValues.end(), expectedOutputValues.begin(), f);

    return Abs2dTestCommon<ArmnnType>(workloadFactory, memoryManager,
                                      inputTensorInfo, outputTensorInfo,
                                      inputValues, expectedOutputValues);
}

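// Checks Abs on a rank-3 tensor, exercising the same workload setup as the 2D case.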
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 3> Abs3dTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    boost::ignore_unused(memoryManager);

    const armnn::TensorShape inputShape{ 3, 1, 2 };
    const armnn::TensorShape outputShape{ 3, 1, 2 };

    float qScale = 0.0625f;
    int32_t qOffset = 64;

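    // As in the 2D test, symmetric 16-bit quantization uses a zero offset.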
    if (ArmnnType == armnn::DataType::QuantisedSymm16)
    {
        qScale = 0.1f;
        qOffset = 0;
    }

    armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(qScale);
    inputTensorInfo.SetQuantizationOffset(qOffset);

    armnn::TensorInfo outputTensorInfo(outputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(qScale);
    outputTensorInfo.SetQuantizationOffset(qOffset);

    std::vector<float> inputValues
    {
        -0.1f, -0.2f, -0.3f,
        0.1f, 0.2f, 0.3f
    };

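    // Calculate output values for input.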
    auto f = [](float value)
    {
        return std::abs(value);
    };
    std::vector<float> expectedOutputValues(inputValues.size());
    std::transform(inputValues.begin(), inputValues.end(), expectedOutputValues.begin(), f);

    auto inputTensor = MakeTensor<T, 3>(inputTensorInfo, ConvertToDataType<ArmnnType>(inputValues, inputTensorInfo));

    LayerTestResult<T, 3> result(outputTensorInfo);
    result.outputExpected = MakeTensor<T, 3>(outputTensorInfo,
                                             ConvertToDataType<ArmnnType>(expectedOutputValues, outputTensorInfo));

    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

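    // Describe, create and run the Abs workload on the rank-3 tensors.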
    armnn::AbsQueueDescriptor descriptor;

    armnn::WorkloadInfo info;

    AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateAbs(descriptor, info);

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), &inputTensor[0][0][0]);

    workload->PostAllocationConfigure();
    workload->Execute();

    CopyDataFromITensorHandle(&result.output[0][0][0], outputHandle.get());

    return result;
}

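// Checks Abs behaviour at zero: the input contains both positive and negative zero.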
template<armnn::DataType ArmnnType, typename T>
LayerTestResult<T, 2> AbsZeroTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    const armnn::TensorShape inputShape{ 1, 2 };
    const armnn::TensorShape outputShape{ 1, 2 };

    armnn::TensorInfo inputTensorInfo(inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(0.1f);

    armnn::TensorInfo outputTensorInfo(outputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(0.1f);

    std::vector<float> inputValues
    {
        0.f, -0.f
    };

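    // Abs is expected to map both signed zeros to positive zero.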
    std::vector<float> expectedOutputValues
    {
        0.f, 0.f
    };

    return Abs2dTestCommon<ArmnnType>(workloadFactory, memoryManager,
                                      inputTensorInfo, outputTensorInfo,
                                      inputValues, expectedOutputValues);
}

//
// Explicit template instantiations
//
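// The templates above are defined in this translation unit, so each data type
// exercised by the backend unit tests needs an explicit instantiation here.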

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 2>
Abs2dTest<armnn::DataType::Float32>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 2>
Abs2dTest<armnn::DataType::Float16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 2>
Abs2dTest<armnn::DataType::QuantisedAsymm8>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 2>
Abs2dTest<armnn::DataType::QuantisedSymm16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 3>
Abs3dTest<armnn::DataType::Float32>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 3>
Abs3dTest<armnn::DataType::Float16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 3>
Abs3dTest<armnn::DataType::QuantisedAsymm8>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 3>
Abs3dTest<armnn::DataType::QuantisedSymm16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 2>
AbsZeroTest<armnn::DataType::Float32>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);

template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 2>
AbsZeroTest<armnn::DataType::Float16>(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);