blob: abfb621c935c7c5e749cfbf24b457fbc4f1e9fc9 [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
5
Sadik Armagana097d2a2021-11-24 15:47:28 +00006#include <CommonTestUtils.hpp>
Matteo Martincighbf0e7222019-06-20 17:17:45 +01007
David Beckac42efd2018-09-26 17:41:13 +01008#include <Graph.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01009
Colm Donelan0c479742021-12-10 12:43:54 +000010#include <armnn/backends/TensorHandle.hpp>
11#include <armnn/backends/WorkloadData.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010012
Sadik Armagan1625efc2021-06-10 18:24:34 +010013#include <doctest/doctest.h>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000014
15#include <utility>
telsoa01c577f2c2018-08-31 09:22:23 +010016
17using namespace armnn;
18using namespace std;
19
/////////////////////////////////////////////////////////////////////////////////////////////
// The following tests are created specifically to exercise the ReleaseConstantData() method in the Layer.
// They build very simple graphs that include the layer to be checked.
// Weights and biases are checked both before the method is called and after.
/////////////////////////////////////////////////////////////////////////////////////////////
25
Sadik Armagan1625efc2021-06-10 18:24:34 +010026TEST_SUITE("LayerReleaseConstantDataTest")
27{
28TEST_CASE("ReleaseBatchNormalizationLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +010029{
Aron Virginas-Tar56055192018-11-12 18:10:43 +000030 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010031
32 // create the layer we're testing
33 BatchNormalizationDescriptor layerDesc;
34 layerDesc.m_Eps = 0.05f;
35 BatchNormalizationLayer* const layer = graph.AddLayer<BatchNormalizationLayer>(layerDesc, "layer");
36
37 armnn::TensorInfo weightInfo({3}, armnn::DataType::Float32);
James Conroy1f58f032021-04-27 17:13:27 +010038 layer->m_Mean = std::make_unique<ScopedTensorHandle>(weightInfo);
39 layer->m_Variance = std::make_unique<ScopedTensorHandle>(weightInfo);
40 layer->m_Beta = std::make_unique<ScopedTensorHandle>(weightInfo);
41 layer->m_Gamma = std::make_unique<ScopedTensorHandle>(weightInfo);
telsoa01c577f2c2018-08-31 09:22:23 +010042 layer->m_Mean->Allocate();
43 layer->m_Variance->Allocate();
44 layer->m_Beta->Allocate();
45 layer->m_Gamma->Allocate();
46
47 // create extra layers
48 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
49 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
50
51 // connect up
52 armnn::TensorInfo tensorInfo({2, 3, 1, 1}, armnn::DataType::Float32);
53 Connect(input, layer, tensorInfo);
54 Connect(layer, output, tensorInfo);
55
56 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +010057 CHECK(layer->m_Mean != nullptr);
58 CHECK(layer->m_Variance != nullptr);
59 CHECK(layer->m_Beta != nullptr);
60 CHECK(layer->m_Gamma != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010061
62 // free up the constants..
63 layer->ReleaseConstantData();
64
65 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +010066 CHECK(layer->m_Mean == nullptr);
67 CHECK(layer->m_Variance == nullptr);
68 CHECK(layer->m_Beta == nullptr);
69 CHECK(layer->m_Gamma == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010070
71 }
72
Keith Davisb4dd5cc2022-04-07 11:32:00 +010073TEST_CASE("ReleaseConvolution2dLayerConstantDataTest")
74{
75 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010076
Keith Davisb4dd5cc2022-04-07 11:32:00 +010077 // create the layer we're testing
78 Convolution2dDescriptor layerDesc;
79 layerDesc.m_PadLeft = 3;
80 layerDesc.m_PadRight = 3;
81 layerDesc.m_PadTop = 1;
82 layerDesc.m_PadBottom = 1;
83 layerDesc.m_StrideX = 2;
84 layerDesc.m_StrideY = 4;
85 layerDesc.m_BiasEnabled = true;
telsoa01c577f2c2018-08-31 09:22:23 +010086
Keith Davisb4dd5cc2022-04-07 11:32:00 +010087 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer");
telsoa01c577f2c2018-08-31 09:22:23 +010088
Keith Davisb4dd5cc2022-04-07 11:32:00 +010089 layer->m_Weight = std::make_unique<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
90 armnn::DataType::Float32));
91 layer->m_Bias = std::make_unique<ScopedTensorHandle>
92 (TensorInfo({ 2 }, GetBiasDataType(armnn::DataType::Float32)));
telsoa01c577f2c2018-08-31 09:22:23 +010093
Keith Davisb4dd5cc2022-04-07 11:32:00 +010094 layer->m_Weight->Allocate();
95 layer->m_Bias->Allocate();
telsoa01c577f2c2018-08-31 09:22:23 +010096
Keith Davisb4dd5cc2022-04-07 11:32:00 +010097 ConstantLayer* weightsLayer = graph.AddLayer<ConstantLayer>("Weights");
98 ConstantLayer* biasLayer = graph.AddLayer<ConstantLayer>("Bias");
telsoa01c577f2c2018-08-31 09:22:23 +010099
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100100 weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(TensorInfo({ 2, 3, 5, 3 },
101 armnn::DataType::Float32));
telsoa01c577f2c2018-08-31 09:22:23 +0100102
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100103 biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(
104 TensorInfo({2}, GetBiasDataType(armnn::DataType::Float32)));
telsoa01c577f2c2018-08-31 09:22:23 +0100105
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100106 TensorInfo weightsInfo = weightsLayer->m_LayerOutput->GetTensorInfo();
107 weightsInfo.SetConstant();
108 TensorInfo biasInfo = biasLayer->m_LayerOutput->GetTensorInfo();
109 biasInfo.SetConstant();
telsoa01c577f2c2018-08-31 09:22:23 +0100110
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100111 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
112 biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
113
114 // create extra layers
115 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
116 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
117
118 // connect up
119 Connect(input, layer, TensorInfo({ 2, 3, 8, 16 }, armnn::DataType::Float32));
120 weightsLayer->GetOutputSlot().Connect(layer->GetInputSlot(1));
121 biasLayer->GetOutputSlot().Connect(layer->GetInputSlot(2));
122 Connect(layer, output, TensorInfo({ 2, 2, 2, 10 }, armnn::DataType::Float32));
123
124 // check the constants that they are not NULL
Francis Murtaghd86f6c42022-05-09 17:00:21 +0100125 CHECK(weightsLayer->m_LayerOutput != nullptr);
126 CHECK(biasLayer->m_LayerOutput != nullptr);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100127
128 // free up the constants..
129 layer->ReleaseConstantData();
130
131 // check the constants that they are NULL now
Francis Murtaghd86f6c42022-05-09 17:00:21 +0100132 CHECK(weightsLayer->m_LayerOutput == nullptr);
133 CHECK(biasLayer->m_LayerOutput == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100134}
135
Sadik Armagan1625efc2021-06-10 18:24:34 +0100136TEST_CASE("ReleaseDepthwiseConvolution2dLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100137{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000138 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100139
140 // create the layer we're testing
141 DepthwiseConvolution2dDescriptor layerDesc;
142 layerDesc.m_PadLeft = 3;
143 layerDesc.m_PadRight = 3;
144 layerDesc.m_PadTop = 1;
145 layerDesc.m_PadBottom = 1;
146 layerDesc.m_StrideX = 2;
147 layerDesc.m_StrideY = 4;
148 layerDesc.m_BiasEnabled = true;
149
150 DepthwiseConvolution2dLayer* const layer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "layer");
151
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100152 layer->m_Weight = std::make_unique<ScopedTensorHandle>(
153 TensorInfo({3, 3, 5, 3}, DataType::Float32));
154 layer->m_Bias = std::make_unique<ScopedTensorHandle>(
155 TensorInfo({9}, DataType::Float32));
telsoa01c577f2c2018-08-31 09:22:23 +0100156 layer->m_Weight->Allocate();
157 layer->m_Bias->Allocate();
158
159 // create extra layers
160 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
161 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
162
163 // connect up
164 Connect(input, layer, TensorInfo({2, 3, 8, 16}, armnn::DataType::Float32));
165 Connect(layer, output, TensorInfo({2, 9, 2, 10}, armnn::DataType::Float32));
166
167 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +0100168 CHECK(layer->m_Weight != nullptr);
169 CHECK(layer->m_Bias != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100170
171 // free up the constants..
172 layer->ReleaseConstantData();
173
174 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +0100175 CHECK(layer->m_Weight == nullptr);
176 CHECK(layer->m_Bias == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100177}
178
Sadik Armagan1625efc2021-06-10 18:24:34 +0100179TEST_CASE("ReleaseFullyConnectedLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100180{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000181 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100182
183 // create the layer we're testing
184 FullyConnectedDescriptor layerDesc;
185 layerDesc.m_BiasEnabled = true;
186 layerDesc.m_TransposeWeightMatrix = true;
187
188 FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer");
189
190 float inputsQScale = 1.0f;
191 float outputQScale = 2.0f;
192
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100193 layer->m_Weight = std::make_unique<ScopedTensorHandle>(
194 TensorInfo({7, 20}, DataType::QAsymmU8, inputsQScale, 0));
195 layer->m_Bias = std::make_unique<ScopedTensorHandle>(
196 TensorInfo({7}, GetBiasDataType(DataType::QAsymmU8), inputsQScale));
telsoa01c577f2c2018-08-31 09:22:23 +0100197 layer->m_Weight->Allocate();
198 layer->m_Bias->Allocate();
199
200 // create extra layers
201 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
202 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
203
204 // connect up
Derek Lambertif90c56d2020-01-10 17:14:08 +0000205 Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType::QAsymmU8, inputsQScale));
206 Connect(layer, output, TensorInfo({3, 7}, DataType::QAsymmU8, outputQScale));
telsoa01c577f2c2018-08-31 09:22:23 +0100207
208 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +0100209 CHECK(layer->m_Weight != nullptr);
210 CHECK(layer->m_Bias != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100211
212 // free up the constants..
213 layer->ReleaseConstantData();
214
215 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +0100216 CHECK(layer->m_Weight == nullptr);
217 CHECK(layer->m_Bias == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100218}
219
Sadik Armagan1625efc2021-06-10 18:24:34 +0100220}
telsoa01c577f2c2018-08-31 09:22:23 +0100221