blob: 42673d5b99914dc8dec35f76b690b7533895cc86 [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
Nattapat Chaimanowong8a54ac02019-03-29 15:25:04 +00005
Aron Virginas-Tar00d306e2019-08-28 18:08:46 +01006#include "DequantizeTestImpl.hpp"
7
8#include <ResolveType.hpp>
Nattapat Chaimanowong8a54ac02019-03-29 15:25:04 +00009
10#include <armnn/ArmNN.hpp>
Nattapat Chaimanowong8a54ac02019-03-29 15:25:04 +000011
Aron Virginas-Tar00d306e2019-08-28 18:08:46 +010012#include <backendsCommon/test/TensorCopyUtils.hpp>
13#include <backendsCommon/test/WorkloadTestUtils.hpp>
Nattapat Chaimanowong8a54ac02019-03-29 15:25:04 +000014
15#include <test/TensorHelpers.hpp>
16
17namespace
18{
19
// Shared driver for all Dequantize layer tests.
//
// Builds a Dequantize workload from the supplied tensor infos, feeds it
// inputData, executes it on the given backend factory, and returns a
// LayerTestResult holding both the actual output and the expected output
// so the caller's framework can compare them.
//
// T          - quantized element type of the input (e.g. uint8_t, int16_t)
// Dim        - tensor rank (all current callers use 4)
// descriptor - taken by value: AddInput/AddOutputToWorkload mutate it
//              before the workload is created.
template<typename T, std::size_t Dim>
LayerTestResult<float, Dim> DequantizeTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::TensorInfo& inputTensorInfo,
    const armnn::TensorInfo& outputTensorInfo,
    const std::vector<T>& inputData,
    const std::vector<float>& expectedOutputData,
    armnn::DequantizeQueueDescriptor descriptor)
{
    boost::multi_array<T, Dim> input = MakeTensor<T, Dim>(inputTensorInfo, inputData);

    // Pre-fill the expected side of the result; .output is filled after execution.
    LayerTestResult<float, Dim> ret(outputTensorInfo);
    ret.outputExpected = MakeTensor<float, Dim>(outputTensorInfo, expectedOutputData);

    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

    // Wire the handles into the queue descriptor / workload info before
    // the workload is created - CreateDequantize validates against them.
    armnn::WorkloadInfo info;
    AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateDequantize(descriptor, info);

    // Backing memory must exist before data is copied in.
    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.data());

    ExecuteWorkload(*workload, memoryManager);

    // Pull the computed result back out of the backend tensor.
    CopyDataFromITensorHandle(ret.output.data(), outputHandle.get());

    return ret;
}
55
56template <armnn::DataType ArmnnInputType>
57LayerTestResult<float, 4> DequantizeSimpleTest(
58 armnn::IWorkloadFactory& workloadFactory,
59 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
60{
61 using T = armnn::ResolveType<ArmnnInputType>;
62
63 armnn::DequantizeQueueDescriptor desc;
64
Nattapat Chaimanowongafa4e3a2019-04-02 11:41:45 +010065 const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 0);
66 const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, armnn::DataType::Float32);
67
68 std::vector<T> inputData = std::vector<T>(
69 {
70 2, 4, 6,
71 8, 10, 12,
72 14, 16, 18,
73 20, 22, 24,
74 });
75
76 std::vector<float> expectedOutputData = std::vector<float>(
77 {
78 1.0f, 2.0f, 3.0f,
79 4.0f, 5.0f, 6.0f,
80 7.0f, 8.0f, 9.0f,
81 10.0f, 11.0f, 12.0f,
82 });
83
84 return DequantizeTestImpl<T, 4>(workloadFactory,
85 memoryManager,
86 inputTensorInfo,
87 outputTensorInfo,
88 inputData,
89 expectedOutputData,
90 desc);
91}
92
93template <armnn::DataType ArmnnInputType>
94LayerTestResult<float, 4> DequantizeOffsetTest(
95 armnn::IWorkloadFactory& workloadFactory,
96 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
97{
98 using T = armnn::ResolveType<ArmnnInputType>;
99
100 armnn::DequantizeQueueDescriptor desc;
101
Nattapat Chaimanowong8a54ac02019-03-29 15:25:04 +0000102 const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 1);
103 const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, armnn::DataType::Float32);
104
105 std::vector<T> inputData = std::vector<T>(
106 {
107 3, 5, 7,
108 9, 11, 13,
109 15, 17, 19,
110 21, 23, 25,
111 });
112
113 std::vector<float> expectedOutputData = std::vector<float>(
114 {
115 1.0f, 2.0f, 3.0f,
116 4.0f, 5.0f, 6.0f,
117 7.0f, 8.0f, 9.0f,
118 10.0f, 11.0f, 12.0f,
119 });
120
121 return DequantizeTestImpl<T, 4>(workloadFactory,
122 memoryManager,
123 inputTensorInfo,
124 outputTensorInfo,
125 inputData,
126 expectedOutputData,
127 desc);
128}
129
130} // anonymous namespace
Aron Virginas-Tar00d306e2019-08-28 18:08:46 +0100131
132LayerTestResult<float, 4> DequantizeSimpleUint8Test(
133 armnn::IWorkloadFactory& workloadFactory,
134 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
135{
136 return DequantizeSimpleTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
137}
138
139LayerTestResult<float, 4> DequantizeOffsetUint8Test(
140 armnn::IWorkloadFactory& workloadFactory,
141 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
142{
143 return DequantizeOffsetTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
144}
145
146LayerTestResult<float, 4> DequantizeSimpleInt16Test(
147 armnn::IWorkloadFactory& workloadFactory,
148 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
149{
150 return DequantizeSimpleTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
151}