blob: dfc7bfc18e58391867f27be1faa1f5a9549ec099 [file] [log] [blame]
//
// Copyright © 2023 Arm Ltd and contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "CommonTestUtils.hpp"

#include <ResolveType.hpp>

#include <armnn/INetwork.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <doctest/doctest.h>

#include <cstdint>
#include <map>
#include <stdexcept>
#include <vector>
17
namespace
{

21template<armnn::DataType ArmnnTypeInput>
22INetworkPtr CreateElementwiseBinaryNetwork(const TensorShape& input1Shape,
23 const TensorShape& input2Shape,
24 const TensorShape& outputShape,
25 BinaryOperation operation,
26 const float qScale = 1.0f,
27 const int32_t qOffset = 0)
28{
29 using namespace armnn;
30
31 INetworkPtr net(INetwork::Create());
32
33 TensorInfo input1TensorInfo(input1Shape, ArmnnTypeInput, qScale, qOffset, true);
34 TensorInfo input2TensorInfo(input2Shape, ArmnnTypeInput, qScale, qOffset, true);
35 TensorInfo outputTensorInfo(outputShape, ArmnnTypeInput, qScale, qOffset);
36
37 IConnectableLayer* input1 = net->AddInputLayer(armnn::numeric_cast<LayerBindingId>(0));
38 IConnectableLayer* input2 = net->AddInputLayer(armnn::numeric_cast<LayerBindingId>(1));
39 IConnectableLayer* elementwiseBinaryLayer = net->AddElementwiseBinaryLayer(operation, "elementwiseUnary");
40 IConnectableLayer* output = net->AddOutputLayer(0, "output");
41
42 Connect(input1, elementwiseBinaryLayer, input1TensorInfo, 0, 0);
43 Connect(input2, elementwiseBinaryLayer, input2TensorInfo, 0, 1);
44 Connect(elementwiseBinaryLayer, output, outputTensorInfo, 0, 0);
45
46 return net;
47}
48
49template<armnn::DataType ArmnnInType,
50 typename TInput = armnn::ResolveType<ArmnnInType>>
51void ElementwiseBinarySimpleEndToEnd(const std::vector<BackendId>& backends,
52 BinaryOperation operation)
53{
54 using namespace armnn;
55
56 const float qScale = IsQuantizedType<TInput>() ? 0.25f : 1.0f;
57 const int32_t qOffset = IsQuantizedType<TInput>() ? 50 : 0;
58
59 const TensorShape& input1Shape = { 2, 2, 2, 2 };
60 const TensorShape& input2Shape = { 1 };
61 const TensorShape& outputShape = { 2, 2, 2, 2 };
62
63 // Builds up the structure of the network
64 INetworkPtr net = CreateElementwiseBinaryNetwork<ArmnnInType>(input1Shape, input2Shape, outputShape,
65 operation, qScale, qOffset);
66
67 CHECK(net);
68
69 const std::vector<float> input1({ 1, -1, 1, 1, 5, -5, 5, 5, -3, 3, 3, 3, 4, 4, -4, 4 });
70
71 const std::vector<float> input2({ 2 });
72 std::vector<float> expectedOutput;
73 switch (operation) {
74 case armnn::BinaryOperation::Add:
75 expectedOutput = { 3, 1, 3, 3, 7, -3, 7, 7, -1, 5, 5, 5, 6, 6, -2, 6 };
76 break;
77 case armnn::BinaryOperation::Div:
78 expectedOutput = {0.5f, -0.5f, 0.5f, 0.5f, 2.5f, -2.5f, 2.5f, 2.5f, -1.5f, 1.5f, 1.5f, 1.5f, 2, 2, -2, 2};
79 break;
80 case armnn::BinaryOperation::Maximum:
81 expectedOutput = { 2, 2, 2, 2, 5, 2, 5, 5, 2, 3, 3, 3, 4, 4, 2, 4 };
82 break;
83 case armnn::BinaryOperation::Minimum:
84 expectedOutput = { 1, -1, 1, 1, 2, -5, 2, 2, -3, 2, 2, 2, 2, 2, -4, 2 };
85 break;
86 case armnn::BinaryOperation::Mul:
87 expectedOutput = { 2, -2, 2, 2, 10, -10, 10, 10, -6, 6, 6, 6, 8, 8, -8, 8 };
88 break;
89 case armnn::BinaryOperation::Sub:
90 expectedOutput = { -1, -3, -1, -1, 3, -7, 3, 3, -5, 1, 1, 1, 2, 2, -6, 2 };
91 break;
John Mcloughlin0ec00872023-05-15 17:03:49 +010092 case armnn::BinaryOperation::SqDiff:
93 expectedOutput = { 1, 9, 1, 1, 9, 49, 9, 9, 25, 1, 1, 1, 4, 4, 36, 4 };
94 break;
95 case armnn::BinaryOperation::Power:
96 expectedOutput = { 1, 1, 1, 1, 25, 25, 25, 25, 9, 9, 9, 9, 16, 16, 16, 16 };
97 break;
Mike Kelly3ec30772023-03-08 13:47:17 +000098 default:
99 throw("Invalid Elementwise Binary operation");
100 }
101 const std::vector<float> expectedOutput_const = expectedOutput;
102 // quantize data
103 std::vector<TInput> qInput1Data = armnnUtils::QuantizedVector<TInput>(input1, qScale, qOffset);
104 std::vector<TInput> qInput2Data = armnnUtils::QuantizedVector<TInput>(input2, qScale, qOffset);
105 std::vector<TInput> qExpectedOutput = armnnUtils::QuantizedVector<TInput>(expectedOutput_const, qScale, qOffset);
106
107 std::map<int, std::vector<TInput>> inputTensorData = {{ 0, qInput1Data }, { 1, qInput2Data }};
108 std::map<int, std::vector<TInput>> expectedOutputData = {{ 0, qExpectedOutput }};
109
110 EndToEndLayerTestImpl<ArmnnInType, ArmnnInType>(std::move(net), inputTensorData, expectedOutputData, backends);
111}
112
Tianle Cheng7790dc62023-12-12 13:52:22 +0000113template<armnn::DataType ArmnnInType,
114 typename TInput = armnn::ResolveType<ArmnnInType>>
115void ElementwiseBinarySimpleNoReshapeEndToEnd(const std::vector<BackendId>& backends,
116 BinaryOperation operation)
117{
118 using namespace armnn;
119
120 const float qScale = IsQuantizedType<TInput>() ? 0.25f : 1.0f;
121 const int32_t qOffset = IsQuantizedType<TInput>() ? 50 : 0;
122
123 const TensorShape& input1Shape = { 2, 2, 2, 2 };
124 const TensorShape& input2Shape = { 2, 2, 2, 2 };
125 const TensorShape& outputShape = { 2, 2, 2, 2 };
126
127 // Builds up the structure of the network
128 INetworkPtr net = CreateElementwiseBinaryNetwork<ArmnnInType>(input1Shape, input2Shape, outputShape,
129 operation, qScale, qOffset);
130
131 CHECK(net);
132
133 const std::vector<float> input1({ 1, -1, 1, 1, 5, -5, 5, 5, -3, 3, 3, 3, 4, 4, -4, 4 });
134
135 const std::vector<float> input2({ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 });
136
137 std::vector<float> expectedOutput;
138 switch (operation) {
139 case armnn::BinaryOperation::Add:
140 expectedOutput = { 3, 1, 3, 3, 7, -3, 7, 7, -1, 5, 5, 5, 6, 6, -2, 6 };
141 break;
142 case armnn::BinaryOperation::Div:
143 expectedOutput = {0.5f, -0.5f, 0.5f, 0.5f, 2.5f, -2.5f, 2.5f, 2.5f, -1.5f, 1.5f, 1.5f, 1.5f, 2, 2, -2, 2};
144 break;
145 case armnn::BinaryOperation::Maximum:
146 expectedOutput = { 2, 2, 2, 2, 5, 2, 5, 5, 2, 3, 3, 3, 4, 4, 2, 4 };
147 break;
148 case armnn::BinaryOperation::Minimum:
149 expectedOutput = { 1, -1, 1, 1, 2, -5, 2, 2, -3, 2, 2, 2, 2, 2, -4, 2 };
150 break;
151 case armnn::BinaryOperation::Mul:
152 expectedOutput = { 2, -2, 2, 2, 10, -10, 10, 10, -6, 6, 6, 6, 8, 8, -8, 8 };
153 break;
154 case armnn::BinaryOperation::Sub:
155 expectedOutput = { -1, -3, -1, -1, 3, -7, 3, 3, -5, 1, 1, 1, 2, 2, -6, 2 };
156 break;
157 case armnn::BinaryOperation::SqDiff:
158 expectedOutput = { 1, 9, 1, 1, 9, 49, 9, 9, 25, 1, 1, 1, 4, 4, 36, 4 };
159 break;
160 case armnn::BinaryOperation::Power:
161 expectedOutput = { 1, 1, 1, 1, 25, 25, 25, 25, 9, 9, 9, 9, 16, 16, 16, 16 };
162 break;
163 default:
164 throw("Invalid Elementwise Binary operation");
165 }
166
167 const std::vector<float> expectedOutput_const = expectedOutput;
168 // quantize data
169 std::vector<TInput> qInput1Data = armnnUtils::QuantizedVector<TInput>(input1, qScale, qOffset);
170 std::vector<TInput> qInput2Data = armnnUtils::QuantizedVector<TInput>(input2, qScale, qOffset);
171 std::vector<TInput> qExpectedOutput = armnnUtils::QuantizedVector<TInput>(expectedOutput_const, qScale, qOffset);
172
173 std::map<int, std::vector<TInput>> inputTensorData = {{ 0, qInput1Data }, { 1, qInput2Data }};
174 std::map<int, std::vector<TInput>> expectedOutputData = {{ 0, qExpectedOutput }};
175
176 EndToEndLayerTestImpl<ArmnnInType, ArmnnInType>(std::move(net), inputTensorData, expectedOutputData, backends);
177}
178
} // anonymous namespace