blob: 5ceb8ae4b47f3b98665af619a3bf5a7c46609026 [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
5
Sadik Armagana097d2a2021-11-24 15:47:28 +00006#include <CommonTestUtils.hpp>
Matteo Martincighbf0e7222019-06-20 17:17:45 +01007
David Beckac42efd2018-09-26 17:41:13 +01008#include <Graph.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01009
Colm Donelan0c479742021-12-10 12:43:54 +000010#include <armnn/backends/TensorHandle.hpp>
11#include <armnn/backends/WorkloadData.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010012
Sadik Armagan1625efc2021-06-10 18:24:34 +010013#include <doctest/doctest.h>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000014
15#include <utility>
telsoa01c577f2c2018-08-31 09:22:23 +010016
17using namespace armnn;
18using namespace std;
19
/////////////////////////////////////////////////////////////////////////////////////////////
// The following tests are created specifically to exercise the ReleaseConstantData() method in the Layer.
// They build very simple graphs that include the layer to be checked.
// Each test checks the weights and biases before and after the method is called.
/////////////////////////////////////////////////////////////////////////////////////////////
25
Sadik Armagan1625efc2021-06-10 18:24:34 +010026TEST_SUITE("LayerReleaseConstantDataTest")
27{
28TEST_CASE("ReleaseBatchNormalizationLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +010029{
Aron Virginas-Tar56055192018-11-12 18:10:43 +000030 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010031
32 // create the layer we're testing
33 BatchNormalizationDescriptor layerDesc;
34 layerDesc.m_Eps = 0.05f;
35 BatchNormalizationLayer* const layer = graph.AddLayer<BatchNormalizationLayer>(layerDesc, "layer");
36
37 armnn::TensorInfo weightInfo({3}, armnn::DataType::Float32);
James Conroy1f58f032021-04-27 17:13:27 +010038 layer->m_Mean = std::make_unique<ScopedTensorHandle>(weightInfo);
39 layer->m_Variance = std::make_unique<ScopedTensorHandle>(weightInfo);
40 layer->m_Beta = std::make_unique<ScopedTensorHandle>(weightInfo);
41 layer->m_Gamma = std::make_unique<ScopedTensorHandle>(weightInfo);
telsoa01c577f2c2018-08-31 09:22:23 +010042 layer->m_Mean->Allocate();
43 layer->m_Variance->Allocate();
44 layer->m_Beta->Allocate();
45 layer->m_Gamma->Allocate();
46
47 // create extra layers
48 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
49 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
50
51 // connect up
52 armnn::TensorInfo tensorInfo({2, 3, 1, 1}, armnn::DataType::Float32);
53 Connect(input, layer, tensorInfo);
54 Connect(layer, output, tensorInfo);
55
56 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +010057 CHECK(layer->m_Mean != nullptr);
58 CHECK(layer->m_Variance != nullptr);
59 CHECK(layer->m_Beta != nullptr);
60 CHECK(layer->m_Gamma != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010061
62 // free up the constants..
63 layer->ReleaseConstantData();
64
65 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +010066 CHECK(layer->m_Mean == nullptr);
67 CHECK(layer->m_Variance == nullptr);
68 CHECK(layer->m_Beta == nullptr);
69 CHECK(layer->m_Gamma == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010070
71 }
72
Keith Davis2cddc722022-04-07 11:32:00 +010073TEST_CASE("ReleaseConvolution2dLayerConstantDataTest")
74{
75 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010076
Keith Davis2cddc722022-04-07 11:32:00 +010077 // create the layer we're testing
78 Convolution2dDescriptor layerDesc;
79 layerDesc.m_PadLeft = 3;
80 layerDesc.m_PadRight = 3;
81 layerDesc.m_PadTop = 1;
82 layerDesc.m_PadBottom = 1;
83 layerDesc.m_StrideX = 2;
84 layerDesc.m_StrideY = 4;
85 layerDesc.m_BiasEnabled = true;
telsoa01c577f2c2018-08-31 09:22:23 +010086
Keith Davis2cddc722022-04-07 11:32:00 +010087 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer");
telsoa01c577f2c2018-08-31 09:22:23 +010088
Keith Davis2cddc722022-04-07 11:32:00 +010089 layer->m_Weight = std::make_unique<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
90 armnn::DataType::Float32));
91 layer->m_Bias = std::make_unique<ScopedTensorHandle>
92 (TensorInfo({ 2 }, GetBiasDataType(armnn::DataType::Float32)));
telsoa01c577f2c2018-08-31 09:22:23 +010093
Keith Davis2cddc722022-04-07 11:32:00 +010094 layer->m_Weight->Allocate();
95 layer->m_Bias->Allocate();
telsoa01c577f2c2018-08-31 09:22:23 +010096
Keith Davis2cddc722022-04-07 11:32:00 +010097 ConstantLayer* weightsLayer = graph.AddLayer<ConstantLayer>("Weights");
98 ConstantLayer* biasLayer = graph.AddLayer<ConstantLayer>("Bias");
telsoa01c577f2c2018-08-31 09:22:23 +010099
Keith Davis2cddc722022-04-07 11:32:00 +0100100 weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
101 armnn::DataType::Float32));
telsoa01c577f2c2018-08-31 09:22:23 +0100102
Keith Davis2cddc722022-04-07 11:32:00 +0100103 biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(
104 TensorInfo({2}, GetBiasDataType(armnn::DataType::Float32)));
telsoa01c577f2c2018-08-31 09:22:23 +0100105
Keith Davis2cddc722022-04-07 11:32:00 +0100106 TensorInfo weightsInfo = weightsLayer->m_LayerOutput->GetTensorInfo();
107 weightsInfo.SetConstant();
108 TensorInfo biasInfo = biasLayer->m_LayerOutput->GetTensorInfo();
109 biasInfo.SetConstant();
telsoa01c577f2c2018-08-31 09:22:23 +0100110
telsoa01c577f2c2018-08-31 09:22:23 +0100111
Keith Davis2cddc722022-04-07 11:32:00 +0100112 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
113 biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
114
115 // create extra layers
116 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
117 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
118
119 // connect up
120 Connect(input, layer, TensorInfo({ 2, 3, 8, 16 }, armnn::DataType::Float32));
121 weightsLayer->GetOutputSlot().Connect(layer->GetInputSlot(1));
122 biasLayer->GetOutputSlot().Connect(layer->GetInputSlot(2));
123 Connect(layer, output, TensorInfo({ 2, 2, 2, 10 }, armnn::DataType::Float32));
124
125 // check the constants that they are not NULL
126 CHECK(layer->m_Weight != nullptr);
127 CHECK(layer->m_Bias != nullptr);
128
129 // free up the constants..
130 layer->ReleaseConstantData();
131
132 // check the constants that they are NULL now
133 CHECK(layer->m_Weight == nullptr);
134 CHECK(layer->m_Bias == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100135}
136
Sadik Armagan1625efc2021-06-10 18:24:34 +0100137TEST_CASE("ReleaseDepthwiseConvolution2dLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100138{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000139 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100140
141 // create the layer we're testing
142 DepthwiseConvolution2dDescriptor layerDesc;
143 layerDesc.m_PadLeft = 3;
144 layerDesc.m_PadRight = 3;
145 layerDesc.m_PadTop = 1;
146 layerDesc.m_PadBottom = 1;
147 layerDesc.m_StrideX = 2;
148 layerDesc.m_StrideY = 4;
149 layerDesc.m_BiasEnabled = true;
150
151 DepthwiseConvolution2dLayer* const layer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "layer");
152
Keith Davis2cddc722022-04-07 11:32:00 +0100153 layer->m_Weight = std::make_unique<ScopedTensorHandle>(
154 TensorInfo({3, 3, 5, 3}, DataType::Float32));
155 layer->m_Bias = std::make_unique<ScopedTensorHandle>(
156 TensorInfo({9}, DataType::Float32));
telsoa01c577f2c2018-08-31 09:22:23 +0100157 layer->m_Weight->Allocate();
158 layer->m_Bias->Allocate();
159
160 // create extra layers
161 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
162 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
163
164 // connect up
165 Connect(input, layer, TensorInfo({2, 3, 8, 16}, armnn::DataType::Float32));
166 Connect(layer, output, TensorInfo({2, 9, 2, 10}, armnn::DataType::Float32));
167
168 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +0100169 CHECK(layer->m_Weight != nullptr);
170 CHECK(layer->m_Bias != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100171
172 // free up the constants..
173 layer->ReleaseConstantData();
174
175 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +0100176 CHECK(layer->m_Weight == nullptr);
177 CHECK(layer->m_Bias == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100178}
179
Sadik Armagan1625efc2021-06-10 18:24:34 +0100180TEST_CASE("ReleaseFullyConnectedLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100181{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000182 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100183
184 // create the layer we're testing
185 FullyConnectedDescriptor layerDesc;
186 layerDesc.m_BiasEnabled = true;
187 layerDesc.m_TransposeWeightMatrix = true;
188
189 FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer");
190
191 float inputsQScale = 1.0f;
192 float outputQScale = 2.0f;
193
Keith Davis2cddc722022-04-07 11:32:00 +0100194 layer->m_Weight = std::make_unique<ScopedTensorHandle>(
195 TensorInfo({7, 20}, DataType::QAsymmU8, inputsQScale, 0));
196 layer->m_Bias = std::make_unique<ScopedTensorHandle>(
197 TensorInfo({7}, GetBiasDataType(DataType::QAsymmU8), inputsQScale));
telsoa01c577f2c2018-08-31 09:22:23 +0100198 layer->m_Weight->Allocate();
199 layer->m_Bias->Allocate();
200
201 // create extra layers
202 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
203 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
204
205 // connect up
Derek Lambertif90c56d2020-01-10 17:14:08 +0000206 Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType::QAsymmU8, inputsQScale));
207 Connect(layer, output, TensorInfo({3, 7}, DataType::QAsymmU8, outputQScale));
telsoa01c577f2c2018-08-31 09:22:23 +0100208
209 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +0100210 CHECK(layer->m_Weight != nullptr);
211 CHECK(layer->m_Bias != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100212
213 // free up the constants..
214 layer->ReleaseConstantData();
215
216 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +0100217 CHECK(layer->m_Weight == nullptr);
218 CHECK(layer->m_Bias == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100219}
220
Sadik Armagan1625efc2021-06-10 18:24:34 +0100221}
telsoa01c577f2c2018-08-31 09:22:23 +0100222