//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/TypesUtils.hpp>

#include <test/TensorHelpers.hpp>
#include "QuantizeHelper.hpp"

#include <backends/CpuTensorHandle.hpp>
#include <backends/WorkloadFactory.hpp>

17template<typename T>
18LayerTestResult<T, 4> SimpleReshapeTestImpl(
19 armnn::IWorkloadFactory& workloadFactory,
20 armnn::TensorInfo inputTensorInfo,
21 armnn::TensorInfo outputTensorInfo,
22 const std::vector<T>& inputData,
23 const std::vector<T>& outputExpectedData)
24{
25 auto input = MakeTensor<T, 4>(inputTensorInfo, inputData);
26
27 LayerTestResult<T, 4> ret(outputTensorInfo);
28 ret.outputExpected = MakeTensor<T, 4>(outputTensorInfo, outputExpectedData);
29
30 std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
31 std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
32
33 armnn::ReshapeQueueDescriptor data;
34 armnn::WorkloadInfo info;
35 AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
36 AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());
37
38 std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateReshape(data, info);
39
40 inputHandle->Allocate();
41 outputHandle->Allocate();
42
43 CopyDataToITensorHandle(inputHandle.get(), &input[0][0][0][0]);
44
45 workload->Execute();
46
47 CopyDataFromITensorHandle(&ret.output[0][0][0][0], outputHandle.get());
48
49 return ret;
50}
51
52LayerTestResult<float, 4> SimpleReshapeFloat32Test(armnn::IWorkloadFactory& workloadFactory)
53{
54 armnn::TensorInfo inputTensorInfo;
55 armnn::TensorInfo outputTensorInfo;
56
57 unsigned int inputShape[] = { 2, 2, 3, 3 };
58 unsigned int outputShape[] = { 2, 2, 9, 1 };
59
60 inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32);
61 outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32);
62
63 std::vector<float> input = std::vector<float>(
64 {
65 0.0f, 1.0f, 2.0f,
66 3.0f, 4.0f, 5.0f,
67 6.0f, 7.0f, 8.0f,
68
69 9.0f, 10.0f, 11.0f,
70 12.0f, 13.0f, 14.0f,
71 15.0f, 16.0f, 17.0f,
72
73 18.0f, 19.0f, 20.0f,
74 21.0f, 22.0f, 23.0f,
75 24.0f, 25.0f, 26.0f,
76
77 27.0f, 28.0f, 29.0f,
78 30.0f, 31.0f, 32.0f,
79 33.0f, 34.0f, 35.0f,
80 });
81
82 std::vector<float> outputExpected = std::vector<float>(
83 {
84 0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f,
85
86 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f,
87
88 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f, 25.0f, 26.0f,
89
90 27.0f, 28.0f, 29.0f, 30.0f, 31.0f, 32.0f, 33.0f, 34.0f, 35.0f,
91 });
92
93 return SimpleReshapeTestImpl<float>(workloadFactory, inputTensorInfo, outputTensorInfo, input, outputExpected);
94}
95
96LayerTestResult<float, 4> SimpleFloorTest(armnn::IWorkloadFactory& workloadFactory)
97{
98 const armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, armnn::DataType::Float32);
99 const armnn::TensorInfo outputTensorInfo(inputTensorInfo);
100
101 auto input = MakeTensor<float, 4>(inputTensorInfo,
102 { -37.5f, -15.2f, -8.76f, -2.0f, -1.5f, -1.3f, -0.5f, -0.4f, 0.0f,
103 1.0f, 0.4f, 0.5f, 1.3f, 1.5f, 2.0f, 8.76f, 15.2f, 37.5f });
104
105 LayerTestResult<float, 4> ret(outputTensorInfo);
106 ret.outputExpected = MakeTensor<float, 4>(outputTensorInfo,
107 { -38.0f, -16.0f, -9.0f, -2.0f, -2.0f, -2.0f, -1.0f, -1.0f, 0.0f,
108 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 2.0f, 8.0f, 15.0f, 37.0f });
109
110 std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
111 std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
112
113 armnn::FloorQueueDescriptor data;
114 armnn::WorkloadInfo info;
115 AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
116 AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());
117
118 std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateFloor(data, info);
119
120 inputHandle->Allocate();
121 outputHandle->Allocate();
122
123 CopyDataToITensorHandle(inputHandle.get(), &input[0][0][0][0]);
124
125 workload->Execute();
126
127 CopyDataFromITensorHandle(&ret.output[0][0][0][0], outputHandle.get());
128
129 return ret;
130}
131
132LayerTestResult<uint8_t, 4> SimpleReshapeUint8Test(armnn::IWorkloadFactory& workloadFactory)
133{
134 armnn::TensorInfo inputTensorInfo;
135 armnn::TensorInfo outputTensorInfo;
136
137 unsigned int inputShape[] = { 2, 2, 3, 3 };
138 unsigned int outputShape[] = { 2, 2, 9, 1 };
139
140 inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::QuantisedAsymm8);
141 inputTensorInfo.SetQuantizationScale(1.0f);
142 outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::QuantisedAsymm8);
143 outputTensorInfo.SetQuantizationScale(1.0f);
144
145 std::vector<uint8_t> input = std::vector<uint8_t>(
146 {
147 0, 1, 2,
148 3, 4, 5,
149 6, 7, 8,
150
151 9, 10, 11,
152 12, 13, 14,
153 15, 16, 17,
154
155 18, 19, 20,
156 21, 22, 23,
157 24, 25, 26,
158
159 27, 28, 29,
160 30, 31, 32,
161 33, 34, 35,
162 });
163
164 std::vector<uint8_t> outputExpected = std::vector<uint8_t>(
165 {
166 0, 1, 2, 3, 4, 5, 6, 7, 8,
167
168 9, 10, 11, 12, 13, 14, 15, 16, 17,
169
170 18, 19, 20, 21, 22, 23, 24, 25, 26,
171
172 27, 28, 29, 30, 31, 32, 33, 34, 35,
173 });
174
175 return SimpleReshapeTestImpl<uint8_t>(workloadFactory, inputTensorInfo, outputTensorInfo, input, outputExpected);
176}