blob: b7b514573ccfb216c8a192eb9e49dabcfcf475d3 [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
Mike Kellyec67a0f2022-11-25 13:55:24 +00002// Copyright © 2017,2022 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
5
Sadik Armagana097d2a2021-11-24 15:47:28 +00006#include <CommonTestUtils.hpp>
Matteo Martincighbf0e7222019-06-20 17:17:45 +01007
David Beckac42efd2018-09-26 17:41:13 +01008#include <Graph.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01009
Colm Donelan0c479742021-12-10 12:43:54 +000010#include <armnn/backends/TensorHandle.hpp>
11#include <armnn/backends/WorkloadData.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010012
Sadik Armagan1625efc2021-06-10 18:24:34 +010013#include <doctest/doctest.h>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000014
telsoa01c577f2c2018-08-31 09:22:23 +010015using namespace armnn;
16using namespace std;
17
/////////////////////////////////////////////////////////////////////////////////////////////
// The following tests are created specifically to exercise the ReleaseConstantData() method
// of Layer. Each test builds a very simple graph containing the layer to be checked, and
// verifies the layer's weights and biases both before and after the method is called.
/////////////////////////////////////////////////////////////////////////////////////////////
23
Sadik Armagan1625efc2021-06-10 18:24:34 +010024TEST_SUITE("LayerReleaseConstantDataTest")
25{
26TEST_CASE("ReleaseBatchNormalizationLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +010027{
Aron Virginas-Tar56055192018-11-12 18:10:43 +000028 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010029
30 // create the layer we're testing
31 BatchNormalizationDescriptor layerDesc;
32 layerDesc.m_Eps = 0.05f;
33 BatchNormalizationLayer* const layer = graph.AddLayer<BatchNormalizationLayer>(layerDesc, "layer");
34
35 armnn::TensorInfo weightInfo({3}, armnn::DataType::Float32);
James Conroy1f58f032021-04-27 17:13:27 +010036 layer->m_Mean = std::make_unique<ScopedTensorHandle>(weightInfo);
37 layer->m_Variance = std::make_unique<ScopedTensorHandle>(weightInfo);
38 layer->m_Beta = std::make_unique<ScopedTensorHandle>(weightInfo);
39 layer->m_Gamma = std::make_unique<ScopedTensorHandle>(weightInfo);
telsoa01c577f2c2018-08-31 09:22:23 +010040 layer->m_Mean->Allocate();
41 layer->m_Variance->Allocate();
42 layer->m_Beta->Allocate();
43 layer->m_Gamma->Allocate();
44
45 // create extra layers
46 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
47 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
48
49 // connect up
50 armnn::TensorInfo tensorInfo({2, 3, 1, 1}, armnn::DataType::Float32);
51 Connect(input, layer, tensorInfo);
52 Connect(layer, output, tensorInfo);
53
54 // check the constants that they are not NULL
Sadik Armagan1625efc2021-06-10 18:24:34 +010055 CHECK(layer->m_Mean != nullptr);
56 CHECK(layer->m_Variance != nullptr);
57 CHECK(layer->m_Beta != nullptr);
58 CHECK(layer->m_Gamma != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010059
60 // free up the constants..
61 layer->ReleaseConstantData();
62
63 // check the constants that they are NULL now
Sadik Armagan1625efc2021-06-10 18:24:34 +010064 CHECK(layer->m_Mean == nullptr);
65 CHECK(layer->m_Variance == nullptr);
66 CHECK(layer->m_Beta == nullptr);
67 CHECK(layer->m_Gamma == nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +010068
69 }
70
Keith Davisb4dd5cc2022-04-07 11:32:00 +010071TEST_CASE("ReleaseConvolution2dLayerConstantDataTest")
72{
73 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +010074
Keith Davisb4dd5cc2022-04-07 11:32:00 +010075 // create the layer we're testing
76 Convolution2dDescriptor layerDesc;
77 layerDesc.m_PadLeft = 3;
78 layerDesc.m_PadRight = 3;
79 layerDesc.m_PadTop = 1;
80 layerDesc.m_PadBottom = 1;
81 layerDesc.m_StrideX = 2;
82 layerDesc.m_StrideY = 4;
83 layerDesc.m_BiasEnabled = true;
telsoa01c577f2c2018-08-31 09:22:23 +010084
Mike Kellyec67a0f2022-11-25 13:55:24 +000085 auto* const convolutionLayer = graph.AddLayer<Convolution2dLayer>(layerDesc, "convolution");
86 auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
87 auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
telsoa01c577f2c2018-08-31 09:22:23 +010088
Mike Kellyec67a0f2022-11-25 13:55:24 +000089 TensorInfo weightsInfo = TensorInfo({ 2, 3, 5, 3 }, armnn::DataType::Float32, 1.0, 0.0, true);
90 TensorInfo biasInfo = TensorInfo({ 2 }, GetBiasDataType(armnn::DataType::Float32), 1.0, 0.0, true);
telsoa01c577f2c2018-08-31 09:22:23 +010091
Mike Kellyec67a0f2022-11-25 13:55:24 +000092 weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
93 biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
telsoa01c577f2c2018-08-31 09:22:23 +010094
Keith Davisb4dd5cc2022-04-07 11:32:00 +010095 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
96 biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
97
98 // create extra layers
99 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
100 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
101
102 // connect up
Mike Kellyec67a0f2022-11-25 13:55:24 +0000103 Connect(input, convolutionLayer, TensorInfo({ 2, 3, 8, 16 }, armnn::DataType::Float32));
104 weightsLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(1));
105 biasLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(2));
106 Connect(convolutionLayer, output, TensorInfo({ 2, 2, 2, 10 }, armnn::DataType::Float32));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100107
108 // check the constants that they are not NULL
Francis Murtaghd86f6c42022-05-09 17:00:21 +0100109 CHECK(weightsLayer->m_LayerOutput != nullptr);
110 CHECK(biasLayer->m_LayerOutput != nullptr);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100111
Mike Kellyec67a0f2022-11-25 13:55:24 +0000112 // free up the constants.
113 convolutionLayer->ReleaseConstantData();
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100114
Mike Kellyec67a0f2022-11-25 13:55:24 +0000115 // check the constants that they are still not NULL
116 CHECK(weightsLayer->m_LayerOutput != nullptr);
117 CHECK(biasLayer->m_LayerOutput != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100118}
119
Sadik Armagan1625efc2021-06-10 18:24:34 +0100120TEST_CASE("ReleaseDepthwiseConvolution2dLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100121{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000122 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100123
124 // create the layer we're testing
125 DepthwiseConvolution2dDescriptor layerDesc;
126 layerDesc.m_PadLeft = 3;
127 layerDesc.m_PadRight = 3;
128 layerDesc.m_PadTop = 1;
129 layerDesc.m_PadBottom = 1;
130 layerDesc.m_StrideX = 2;
131 layerDesc.m_StrideY = 4;
132 layerDesc.m_BiasEnabled = true;
133
Mike Kellyec67a0f2022-11-25 13:55:24 +0000134 auto* const convolutionLayer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "convolution");
135 auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
136 auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
telsoa01c577f2c2018-08-31 09:22:23 +0100137
Mike Kellyec67a0f2022-11-25 13:55:24 +0000138 TensorInfo weightsInfo = TensorInfo({ 3, 3, 5, 3 }, armnn::DataType::Float32, 1.0, 0.0, true);
139 TensorInfo biasInfo = TensorInfo({ 9 }, GetBiasDataType(armnn::DataType::Float32), 1.0, 0.0, true);
140
141 weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
142 biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
143
144 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
145 biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100146
147 // create extra layers
148 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
149 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
150
151 // connect up
Mike Kellyec67a0f2022-11-25 13:55:24 +0000152 Connect(input, convolutionLayer, TensorInfo({2, 3, 8, 16}, armnn::DataType::Float32));
153 weightsLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(1));
154 biasLayer->GetOutputSlot().Connect(convolutionLayer->GetInputSlot(2));
155 Connect(convolutionLayer, output, TensorInfo({2, 9, 2, 10}, armnn::DataType::Float32));
telsoa01c577f2c2018-08-31 09:22:23 +0100156
157 // check the constants that they are not NULL
Mike Kellyec67a0f2022-11-25 13:55:24 +0000158 CHECK(weightsLayer->m_LayerOutput != nullptr);
159 CHECK(biasLayer->m_LayerOutput != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100160
Mike Kellyec67a0f2022-11-25 13:55:24 +0000161 // free up the constants.
162 convolutionLayer->ReleaseConstantData();
telsoa01c577f2c2018-08-31 09:22:23 +0100163
Mike Kellyec67a0f2022-11-25 13:55:24 +0000164 // check the constants that they are still not NULL
165 CHECK(weightsLayer->m_LayerOutput != nullptr);
166 CHECK(biasLayer->m_LayerOutput != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100167}
168
Sadik Armagan1625efc2021-06-10 18:24:34 +0100169TEST_CASE("ReleaseFullyConnectedLayerConstantDataTest")
telsoa01c577f2c2018-08-31 09:22:23 +0100170{
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000171 Graph graph;
telsoa01c577f2c2018-08-31 09:22:23 +0100172
173 // create the layer we're testing
174 FullyConnectedDescriptor layerDesc;
175 layerDesc.m_BiasEnabled = true;
176 layerDesc.m_TransposeWeightMatrix = true;
177
Mike Kellyec67a0f2022-11-25 13:55:24 +0000178 auto* const fullyConnectedLayer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer");
179 auto* const weightsLayer = graph.AddLayer<ConstantLayer>("weights");
180 auto* const biasLayer = graph.AddLayer<ConstantLayer>("bias");
telsoa01c577f2c2018-08-31 09:22:23 +0100181
182 float inputsQScale = 1.0f;
183 float outputQScale = 2.0f;
184
Mike Kellyec67a0f2022-11-25 13:55:24 +0000185 TensorInfo weightsInfo = TensorInfo({ 7, 20 }, DataType::QAsymmU8, inputsQScale, 0.0, true);
186 TensorInfo biasInfo = TensorInfo({ 7 }, GetBiasDataType(DataType::QAsymmU8), inputsQScale, 0.0, true);
187
188 weightsLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(weightsInfo);
189 biasLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(biasInfo);
190
191 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
192 biasLayer->GetOutputSlot(0).SetTensorInfo(biasInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100193
194 // create extra layers
195 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
196 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
197
198 // connect up
Mike Kellyec67a0f2022-11-25 13:55:24 +0000199 Connect(input, fullyConnectedLayer, TensorInfo({ 3, 1, 4, 5 }, DataType::QAsymmU8, inputsQScale));
200 weightsLayer->GetOutputSlot().Connect(fullyConnectedLayer->GetInputSlot(1));
201 biasLayer->GetOutputSlot().Connect(fullyConnectedLayer->GetInputSlot(2));
202 Connect(fullyConnectedLayer, output, TensorInfo({ 3, 7 }, DataType::QAsymmU8, outputQScale));
telsoa01c577f2c2018-08-31 09:22:23 +0100203
204 // check the constants that they are not NULL
Mike Kellyec67a0f2022-11-25 13:55:24 +0000205 CHECK(weightsLayer->m_LayerOutput != nullptr);
206 CHECK(biasLayer->m_LayerOutput != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100207
Mike Kellyec67a0f2022-11-25 13:55:24 +0000208 // free up the constants.
209 fullyConnectedLayer->ReleaseConstantData();
telsoa01c577f2c2018-08-31 09:22:23 +0100210
Mike Kellyec67a0f2022-11-25 13:55:24 +0000211 // check the constants that they are still not NULL
212 CHECK(weightsLayer->m_LayerOutput != nullptr);
213 CHECK(biasLayer->m_LayerOutput != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100214}
215
Sadik Armagan1625efc2021-06-10 18:24:34 +0100216}
telsoa01c577f2c2018-08-31 09:22:23 +0100217