//
// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once
#include "armnn/INetwork.hpp"
#include "armnnUtils/QuantizeHelper.hpp"
#include "ElementwiseBinaryEndToEndTestImpl.hpp"
#include "Optimizer.hpp"
#include <CommonTestUtils.hpp>
#include <ResolveType.hpp>
#include <doctest/doctest.h>

namespace
{
using namespace armnn;
17 armnn::INetworkPtr CreateBroadcastToNetwork(BroadcastToDescriptor& descriptor,
18 const armnn::TensorInfo& inputInfo,
19 const armnn::TensorInfo& outputInfo)
20 {
21 INetworkPtr network(INetwork::Create());
22 IConnectableLayer* inputLayer = network->AddInputLayer(0, "input");
23 IConnectableLayer* broadcastLayer = network->AddBroadcastToLayer(descriptor, "broadcast_to");
24 IConnectableLayer* outputLayer = network->AddOutputLayer(0, "output");
25 Connect(inputLayer, broadcastLayer, inputInfo, 0, 0);
26 Connect(broadcastLayer, outputLayer, outputInfo, 0, 0);
27 return network;
28 }
30 armnn::INetworkPtr CreateBroadcastToNetworkWithElementWiseBinary(BroadcastToDescriptor& descriptor,
31 const ElementwiseBinaryDescriptor&
32 elementWiseDescriptor,
33 const armnn::TensorInfo& inputInfo,
34 const armnn::TensorInfo& inputInfoElementWise,
35 const armnn::TensorInfo& outputInfo)
36 {
37 INetworkPtr network(INetwork::Create());
38 IConnectableLayer* inputLayer = network->AddInputLayer(0, "input");
39 IConnectableLayer* inputLayerElementWise = network->AddInputLayer(1, "inputElementWiseBinary");
40 IConnectableLayer* broadcastLayer = network->AddBroadcastToLayer(descriptor, "broadcast_to");
41 IConnectableLayer* multiplicationLayer =
42 network->AddElementwiseBinaryLayer(elementWiseDescriptor,
43 "multiplication");
44 IConnectableLayer* outputLayer = network->AddOutputLayer(0, "output");
45 Connect(inputLayer, broadcastLayer, inputInfo, 0, 0);
46 Connect(inputLayerElementWise, multiplicationLayer,
47 inputInfoElementWise, 0, 1);
48 Connect(broadcastLayer, multiplicationLayer, inputInfo, 0, 0);
49 Connect(multiplicationLayer, outputLayer, outputInfo, 0, 0);
50 return network;
51 }
53 template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
54 void BroadcastToEndToEnd(const std::vector<BackendId>& backends)
55 {
56 float qScale = 1.0f;
57 int32_t qOffset = 0;
58 bool qConst = true;
59
60 const TensorShape inputTensorShape = { {1, 4} };
61 const TensorShape outputTensorShape = { {4, 4} };
62
63 TensorInfo inputInfo (inputTensorShape, ArmnnType, qScale,
64 qOffset, qConst);
65 TensorInfo outputInfo (outputTensorShape, ArmnnType,qScale,
66 qOffset);
67
68 std::vector<T> inputData = armnnUtils::QuantizedVector<T>({
69 65, 144, 91, 161
70 }, qScale, qOffset);
71
72 std::vector<T> expectedOutputData = armnnUtils::QuantizedVector<T>({
73 65, 144, 91, 161,
74 65, 144, 91, 161,
75 65, 144, 91, 161,
76 65, 144, 91, 161
77 }, qScale, qOffset);
78
79 auto descriptor = armnn::BroadcastToDescriptor(armnn::TensorShape({ 4, 4 }));
80 CHECK(descriptor.m_BroadcastToShape == outputTensorShape);
81 INetworkPtr network = CreateBroadcastToNetwork(descriptor, inputInfo, outputInfo);
82
83 std::map<int, std::vector<T>> inputTensor = { { 0, inputData } };
84 std::map<int, std::vector<T>> expectedOutputTensor = { { 0, expectedOutputData } };
85 EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),inputTensor,
86 expectedOutputTensor, backends);
87 }
89 template <armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
90 void BroadcastToEndToEndElementWiseBinary(const std::vector<BackendId>& backends)
91 {
92 float qScale = 1.0f;
93 int32_t qOffset = 0;
94 bool qConst = true;
95
96 const TensorShape inputTensorShape = { {1, 4} };
97 const TensorShape outputTensorShape = { {4, 4} };
98
99 const TensorInfo inputInfo (inputTensorShape, ArmnnType, qScale,
100 qOffset, qConst);
101 const TensorInfo inputInfoElementWise (outputTensorShape, ArmnnType, qScale,
102 qOffset, qConst);
103 const TensorInfo outputInfo (outputTensorShape, ArmnnType,qScale,
104 qOffset);
105
106 std::vector<T> inputData = armnnUtils::QuantizedVector<T>({
107 65, 144, 91, 161
108 }, qScale, qOffset);
109
110 std::vector<T> inputDataElementWise = armnnUtils::QuantizedVector<T>({
111 1, 1, 1, 1,
112 1, 1, 1, 1,
113 1, 1, 1, 1,
114 1, 1, 1, 1
115 }, qScale, qOffset);
116
117 std::vector<T> expectedOutputData = armnnUtils::QuantizedVector<T>({
118 65, 144, 91, 161,
119 65, 144, 91, 161,
120 65, 144, 91, 161,
121 65, 144, 91, 161
122 }, qScale, qOffset);
123
124 auto descriptor = armnn::BroadcastToDescriptor(armnn::TensorShape({ 4, 4 }));
125 CHECK(descriptor.m_BroadcastToShape == outputTensorShape);
126 INetworkPtr network = CreateBroadcastToNetworkWithElementWiseBinary(descriptor,
127 BinaryOperation::Mul,
128 inputInfo,
129 inputInfoElementWise,
130 outputInfo);
131 // Create ArmNN runtime
132 IRuntimePtr run = IRuntime::Create(IRuntime::CreationOptions());
133
134 // Optimise ArmNN network
135 IOptimizedNetworkPtr optNet = Optimize(*network, {Compute::CpuRef},
136 run->GetDeviceSpec());
137
138 Graph& graph = GetGraphForTesting(optNet.get());
139
140 Optimizer::Pass(graph,
141 armnn::MakeOptimizations(armnn::optimizations::BroadcastToOptimizationLayer()));
142
143 std::map<int, std::vector<T>> inputTensor = { { 0, inputData }, {1, inputDataElementWise} };
144 std::map<int, std::vector<T>> expectedOutputTensor = { { 0, expectedOutputData } };
145 EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),inputTensor,
146 expectedOutputTensor, backends);
147 }

} // anonymous namespace