//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "CommonTestUtils.hpp"

#include <armnn/INetwork.hpp>
#include <ResolveType.hpp>

#include <cstdint>
#include <map>
#include <utility>
#include <vector>
12
13namespace
14{
15
16template<typename T>
17armnn::INetworkPtr CreateDequantizeNetwork(const armnn::TensorInfo& inputInfo,
18 const armnn::TensorInfo& outputInfo)
19{
20 armnn::INetworkPtr net(armnn::INetwork::Create());
21
22 armnn::IConnectableLayer* inputLayer = net->AddInputLayer(0);
23 armnn::IConnectableLayer* dequantizeLayer = net->AddDequantizeLayer("Dequantize");
24 armnn::IConnectableLayer* outputLayer = net->AddOutputLayer(0, "output");
25 Connect(inputLayer, dequantizeLayer, inputInfo, 0, 0);
26 Connect(dequantizeLayer, outputLayer, outputInfo, 0, 0);
27
28 return net;
29}
30
31template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
32void DequantizeEndToEndLayerTestImpl(const std::vector<BackendId>& backends,
33 const armnn::TensorShape& tensorShape,
34 const std::vector<T>& input,
35 const std::vector<float>& expectedOutput,
36 float scale,
37 int32_t offset)
38{
39 armnn::TensorInfo inputInfo(tensorShape, ArmnnType);
40 armnn::TensorInfo outputInfo(tensorShape, armnn::DataType::Float32);
41
42 inputInfo.SetQuantizationScale(scale);
43 inputInfo.SetQuantizationOffset(offset);
44
45 // Builds up the structure of the network
46 armnn::INetworkPtr net = CreateDequantizeNetwork<T>(inputInfo, outputInfo);
47
48 BOOST_TEST_CHECKPOINT("create a network");
49
50 std::map<int, std::vector<T>> inputTensorData = { { 0, input } };
51 std::map<int, std::vector<float>> expectedOutputData = { { 0, expectedOutput } };
52
53 EndToEndLayerTestImpl<ArmnnType, armnn::DataType::Float32>(
54 move(net), inputTensorData, expectedOutputData, backends);
55}
56
57template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
58void DequantizeEndToEndSimple(const std::vector<BackendId>& backends)
59{
60 const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
61 std::vector<T> inputData = std::vector<T>(
62 {
63 2, 4, 6, 8,
64 10, 12, 14, 16,
65 18, 20, 22, 24,
66 26, 28, 30, 32
67 });
68
69 std::vector<float> expectedOutputData = std::vector<float>(
70 {
71 1.0f, 2.0f, 3.0f, 4.0f,
72 5.0f, 6.0f, 7.0f, 8.0f,
73 9.0f, 10.0f, 11.0f, 12.0f,
74 13.0f, 14.0f, 15.0f, 16.0f
75 });
76 DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 0);
77};
78
79template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
80void DequantizeEndToEndOffset(const std::vector<BackendId>& backends)
81{
82 const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
83 std::vector<T> inputData = std::vector<T>(
84 {
85 3, 5, 7, 9,
86 11, 13, 15, 17,
87 19, 21, 23, 25,
88 27, 29, 31, 33
89 });
90
91 std::vector<float> expectedOutputData = std::vector<float>(
92 {
93 1.0f, 2.0f, 3.0f, 4.0f,
94 5.0f, 6.0f, 7.0f, 8.0f,
95 9.0f, 10.0f, 11.0f, 12.0f,
96 13.0f, 14.0f, 15.0f, 16.0f
97 });
98 DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 1);
99};
100
101} // anonymous namespace