blob: c64c0a0d40d86a3003b1b6ed17db449593f35bff [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
telsoa01c577f2c2018-08-31 09:22:23 +01005
Sadik Armagana097d2a2021-11-24 15:47:28 +00006#include <GraphUtils.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01007
Derek Lambertia9cca6a2019-03-25 15:41:58 +00008#include <armnn/LayerVisitorBase.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +00009
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000010#include <Network.hpp>
Aron Virginas-Tar70104002018-10-24 15:33:28 +010011
Sadik Armagan1625efc2021-06-10 18:24:34 +010012#include <doctest/doctest.h>
Aron Virginas-Tar70104002018-10-24 15:33:28 +010013
telsoa01c577f2c2018-08-31 09:22:23 +010014namespace
15{
16
17bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer)
18{
19 bool allConnected = true;
20 for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i)
21 {
22 const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr;
23 allConnected &= inputConnected;
24 }
25 return allConnected;
26}
27
28}
29
Sadik Armagan1625efc2021-06-10 18:24:34 +010030TEST_SUITE("Network")
31{
32TEST_CASE("LayerGuids")
telsoa01c577f2c2018-08-31 09:22:23 +010033{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +000034 armnn::NetworkImpl net;
Cathal Corbett5aa9fd72022-02-25 15:33:28 +000035 LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
36 LayerGuid addId = net.AddAdditionLayer()->GetGuid();
37 LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
telsoa01c577f2c2018-08-31 09:22:23 +010038
Sadik Armagan1625efc2021-06-10 18:24:34 +010039 CHECK(inputId != addId);
40 CHECK(addId != outputId);
41 CHECK(inputId != outputId);
telsoa01c577f2c2018-08-31 09:22:23 +010042}
43
Sadik Armagan1625efc2021-06-10 18:24:34 +010044TEST_CASE("NetworkBasic")
telsoa01c577f2c2018-08-31 09:22:23 +010045{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +000046 armnn::NetworkImpl net;
Sadik Armagan1625efc2021-06-10 18:24:34 +010047 CHECK(net.PrintGraph() == armnn::Status::Success);
telsoa01c577f2c2018-08-31 09:22:23 +010048}
49
Sadik Armagan1625efc2021-06-10 18:24:34 +010050TEST_CASE("LayerNamesAreOptionalForINetwork")
telsoa01c577f2c2018-08-31 09:22:23 +010051{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +000052 armnn::INetworkPtr inet(armnn::INetwork::Create());
53 inet->AddInputLayer(0);
54 inet->AddAdditionLayer();
55 inet->AddActivationLayer(armnn::ActivationDescriptor());
56 inet->AddOutputLayer(0);
telsoa01c577f2c2018-08-31 09:22:23 +010057}
58
Sadik Armagan1625efc2021-06-10 18:24:34 +010059TEST_CASE("LayerNamesAreOptionalForNetwork")
telsoa01c577f2c2018-08-31 09:22:23 +010060{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +000061 armnn::NetworkImpl net;
telsoa01c577f2c2018-08-31 09:22:23 +010062 net.AddInputLayer(0);
63 net.AddAdditionLayer();
64 net.AddActivationLayer(armnn::ActivationDescriptor());
65 net.AddOutputLayer(0);
66}
67
TEST_CASE("NetworkModification")
{
    // Builds a linear network touching most layer kinds
    // (input -> conv -> fully-connected -> pooling -> activation ->
    //  normalization -> softmax -> batch-norm -> addition -> multiplication
    //  -> output, plus a constant-weights layer feeding fully-connected),
    // then verifies that every layer exists in the graph and that every
    // connection ended up exactly where it was made.
    armnn::NetworkImpl net;

    armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
    CHECK(inputLayer);

    // Weight tensor for the convolution: 10 floats, flagged as constant data.
    unsigned int dims[] = { 10,1,1,1 };
    std::vector<float> convWeightsData(10);
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32, 0.0f, 0, true), convWeightsData);

    armnn::Convolution2dDescriptor convDesc2d;
    armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d,
                                                                          weights,
                                                                          armnn::EmptyOptional(),
                                                                          "conv layer");
    CHECK(convLayer);

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));

    armnn::FullyConnectedDescriptor fullyConnectedDesc;

    // Constant layer that now holds weights data for FullyConnected
    armnn::IConnectableLayer* const constantWeightsLayer = net.AddConstantLayer(weights, "const weights");
    armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
                                                                                     "fully connected");
    CHECK(constantWeightsLayer);
    CHECK(fullyConnectedLayer);

    // Slot 0 of fully-connected takes the data; slot 1 takes the weights
    // from the constant layer.
    constantWeightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));

    armnn::Pooling2dDescriptor pooling2dDesc;
    armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
    CHECK(poolingLayer);

    fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));

    armnn::ActivationDescriptor activationDesc;
    armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
    CHECK(activationLayer);

    poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));

    armnn::NormalizationDescriptor normalizationDesc;
    armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
    CHECK(normalizationLayer);

    activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));

    armnn::SoftmaxDescriptor softmaxDesc;
    armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
    CHECK(softmaxLayer);

    normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

    armnn::BatchNormalizationDescriptor batchNormDesc;

    // The batch-norm parameter tensors only need to exist for this graph
    // test; the same single-element constant tensor is reused for all four
    // parameters (mean/variance/beta/gamma).
    armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
    std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
    armnn::ConstTensor invalidTensor(tensorInfo, data);

    armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             "batch norm");
    CHECK(batchNormalizationLayer);

    softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));

    armnn::IConnectableLayer* const additionLayer = net.AddAdditionLayer("addition");
    CHECK(additionLayer);

    // A single output deliberately feeds both inputs of the addition layer.
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const multiplicationLayer = net.AddMultiplicationLayer("multiplication");
    CHECK(multiplicationLayer);

    // Likewise, the addition output feeds both multiplication inputs.
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
    CHECK(outputLayer);

    multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    //Tests that all layers are present in the graph.
    CHECK(net.GetGraph().GetNumLayers() == 12);

    //Tests that the vertices exist and have correct names.
    CHECK(GraphHasNamedLayer(net.GetGraph(), "input layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "const weights"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "activation"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "normalization"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "softmax"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "addition"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "output layer"));

    // Helper: src's single output slot feeds exactly tgt's single input slot
    // (1:1 connection), and both sides of the connection agree.
    auto checkOneOutputToOneInputConnection = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 1);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
    };
    // Helper: src's single output slot fans out to both input slots of tgt.
    auto checkOneOutputToTwoInputsConnections = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
        for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
        {
            CHECK(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
            CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
        }
    };
    // Helper: two distinct source layers each feed one input slot of tgt
    // (src1 -> slot 0, src2 -> slot 1).
    auto checkOneOutputToTwoInputConnectionForTwoDifferentLayers = []
        (const armnn::IConnectableLayer* const srcLayer1,
         const armnn::IConnectableLayer* const srcLayer2,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs1 = 1,
         int expectedSrcNumInputs2 = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer1->GetNumInputSlots() == expectedSrcNumInputs1);
        CHECK(srcLayer1->GetNumOutputSlots() == 1);
        CHECK(srcLayer2->GetNumInputSlots() == expectedSrcNumInputs2);
        CHECK(srcLayer2->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer1->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer2->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer1->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(srcLayer2->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(1));
        CHECK(&srcLayer1->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
        CHECK(&srcLayer2->GetOutputSlot(0) == tgtLayer->GetInputSlot(1).GetConnection());
    };

    // No layer (other than input/const) may be left with a dangling input.
    CHECK(AreAllLayerInputSlotsConnected(*convLayer));
    CHECK(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
    CHECK(AreAllLayerInputSlotsConnected(*poolingLayer));
    CHECK(AreAllLayerInputSlotsConnected(*activationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*normalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*softmaxLayer));
    CHECK(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*additionLayer));
    CHECK(AreAllLayerInputSlotsConnected(*multiplicationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*outputLayer));

    // Checks connectivity.
    checkOneOutputToOneInputConnection(inputLayer, convLayer, 0);
    checkOneOutputToTwoInputConnectionForTwoDifferentLayers(convLayer, constantWeightsLayer, fullyConnectedLayer, 1, 0);
    checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer, 2, 1);
    checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
    checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
    checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
    checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
    checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
    checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
    checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}
253
TEST_CASE("NetworkModification_SplitterConcat")
{
    // Builds: input -> splitter -> (softmax_1, softmax_2) -> concat -> output,
    // then checks each splitter-output / concat-input connection from both
    // ends of the link.
    armnn::NetworkImpl net;

    // Adds an input layer and an input tensor descriptor.
    armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
    CHECK(inputLayer);

    // Adds a splitter layer (2 views over 4 dimensions).
    armnn::ViewsDescriptor splitterDesc(2,4);

    armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
    CHECK(splitterLayer);

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));

    // Adds a softmax layer 1.
    armnn::SoftmaxDescriptor softmaxDescriptor;
    armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
    CHECK(softmaxLayer1);

    splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));

    // Adds a softmax layer 2.
    armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
    CHECK(softmaxLayer2);

    splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));

    // Adds a concat layer (2 views, 4 dimensions) re-joining the two branches.
    armnn::OriginsDescriptor concatDesc(2, 4);

    armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
    CHECK(concatLayer);

    softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
    softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));

    // Adds an output layer.
    armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
    CHECK(outputLayer);

    concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Both directions of each splitter->softmax connection must agree.
    CHECK(splitterLayer->GetNumOutputSlots() == 2);
    CHECK(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
    CHECK(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection());
    CHECK(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
    CHECK(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());

    // Likewise for each softmax->concat connection.
    CHECK(concatLayer->GetNumInputSlots() == 2);
    CHECK(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0));
    CHECK(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection());
    CHECK(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1));
    CHECK(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection());
}
310
Sadik Armagan1625efc2021-06-10 18:24:34 +0100311TEST_CASE("NetworkModification_SplitterAddition")
telsoa01c577f2c2018-08-31 09:22:23 +0100312{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +0000313 armnn::NetworkImpl net;
telsoa01c577f2c2018-08-31 09:22:23 +0100314
315 // Adds an input layer and an input tensor descriptor.
316 armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100317 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100318
319 // Adds a splitter layer.
320 armnn::ViewsDescriptor splitterDesc(2,4);
321
322 armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100323 CHECK(splitterLayer);
telsoa01c577f2c2018-08-31 09:22:23 +0100324
325 layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
326
327 // Adds a softmax layer 1.
328 armnn::SoftmaxDescriptor softmaxDescriptor;
329 armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100330 CHECK(softmax1Layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100331
332 splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
333
334 // Adds a softmax layer 2.
335 armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100336 CHECK(softmax2Layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100337
338 splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
339
340 // Adds addition layer.
341 layer = net.AddAdditionLayer("add layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100342 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100343
344 softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
345 softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
346
347 // Adds an output layer.
348 armnn::IConnectableLayer* prevLayer = layer;
349 layer = net.AddOutputLayer(0, "output layer");
350
351 prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
352
Sadik Armagan1625efc2021-06-10 18:24:34 +0100353 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100354}
355
Sadik Armagan1625efc2021-06-10 18:24:34 +0100356TEST_CASE("NetworkModification_SplitterMultiplication")
telsoa01c577f2c2018-08-31 09:22:23 +0100357{
Francis Murtagh3d2b4b22021-02-15 18:23:17 +0000358 armnn::NetworkImpl net;
telsoa01c577f2c2018-08-31 09:22:23 +0100359
360 // Adds an input layer and an input tensor descriptor.
361 armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100362 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100363
364 // Adds a splitter layer.
365 armnn::ViewsDescriptor splitterDesc(2,4);
366 armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100367 CHECK(splitterLayer);
telsoa01c577f2c2018-08-31 09:22:23 +0100368
369 layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
370
371 // Adds a softmax layer 1.
372 armnn::SoftmaxDescriptor softmaxDescriptor;
373 armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100374 CHECK(softmax1Layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100375
376 splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
377
378 // Adds a softmax layer 2.
379 armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100380 CHECK(softmax2Layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100381
382 splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
383
384 // Adds multiplication layer.
385 layer = net.AddMultiplicationLayer("multiplication layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100386 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100387
388 softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
389 softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
390
391 // Adds an output layer.
392 armnn::IConnectableLayer* prevLayer = layer;
393 layer = net.AddOutputLayer(0, "output layer");
Sadik Armagan1625efc2021-06-10 18:24:34 +0100394 CHECK(layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100395
396 prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
397}
398
TEST_CASE("Network_AddQuantize")
{
    // Strategy that records whether a Quantize layer was visited and, when it
    // is, validates its name, slot counts and input/output tensor data types.
    struct Test : public armnn::IStrategy
    {
        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id = 0) override
        {
            armnn::IgnoreUnused(descriptor, constants, id);
            switch (layer->GetType())
            {
                case armnn::LayerType::Input: break;
                case armnn::LayerType::Output: break;
                case armnn::LayerType::Quantize:
                {
                    m_Visited = true;

                    CHECK(layer);

                    // Name must match both via the layer and the visitor arg.
                    std::string expectedName = std::string("quantize");
                    CHECK(std::string(layer->GetName()) == expectedName);
                    CHECK(std::string(name) == expectedName);

                    CHECK(layer->GetNumInputSlots() == 1);
                    CHECK(layer->GetNumOutputSlots() == 1);

                    // Quantize consumes Float32...
                    const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
                    CHECK((infoIn.GetDataType() == armnn::DataType::Float32));

                    // ...and produces quantized asymmetric uint8.
                    const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
                    CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8));
                    break;
                }
                default:
                {
                    // nothing
                }
            }
        }

        // Set to true once the Quantize layer has been seen by the strategy.
        bool m_Visited = false;
    };


    // Build: input -> quantize -> output, with explicit tensor infos so the
    // strategy above can check the data types.
    auto graph = armnn::INetwork::Create();

    auto input = graph->AddInputLayer(0, "input");
    auto quantize = graph->AddQuantizeLayer("quantize");
    auto output = graph->AddOutputLayer(1, "output");

    input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0));
    quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    armnn::TensorInfo infoIn({3,1}, armnn::DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(infoIn);

    armnn::TensorInfo infoOut({3,1}, armnn::DataType::QAsymmU8);
    quantize->GetOutputSlot(0).SetTensorInfo(infoOut);

    Test testQuantize;
    graph->ExecuteStrategy(testQuantize);

    CHECK(testQuantize.m_Visited == true);

}
466
TEST_CASE("Network_AddMerge")
{
    // Strategy that records whether a Merge layer was visited and, when it
    // is, validates its name, slot counts and that all tensors are Float32.
    struct Test : public armnn::IStrategy
    {
        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id = 0) override
        {
            armnn::IgnoreUnused(descriptor, constants, id);
            switch (layer->GetType())
            {
                case armnn::LayerType::Input: break;
                case armnn::LayerType::Output: break;
                case armnn::LayerType::Merge:
                {
                    m_Visited = true;

                    CHECK(layer);

                    // Name must match both via the layer and the visitor arg.
                    std::string expectedName = std::string("merge");
                    CHECK(std::string(layer->GetName()) == expectedName);
                    CHECK(std::string(name) == expectedName);

                    // Merge has two inputs and one output.
                    CHECK(layer->GetNumInputSlots() == 2);
                    CHECK(layer->GetNumOutputSlots() == 1);

                    const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
                    CHECK((infoIn0.GetDataType() == armnn::DataType::Float32));

                    const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo();
                    CHECK((infoIn1.GetDataType() == armnn::DataType::Float32));

                    const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
                    CHECK((infoOut.GetDataType() == armnn::DataType::Float32));
                    break;
                }
                default:
                {
                    // nothing
                }
            }
        }

        // Set to true once the Merge layer has been seen by the strategy.
        bool m_Visited = false;
    };

    // Build: (input0, input1) -> merge -> output.
    armnn::INetworkPtr network = armnn::INetwork::Create();

    armnn::IConnectableLayer* input0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* input1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* merge = network->AddMergeLayer("merge");
    armnn::IConnectableLayer* output = network->AddOutputLayer(0);

    input0->GetOutputSlot(0).Connect(merge->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(merge->GetInputSlot(1));
    merge->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // All three producing slots share the same Float32 tensor info.
    const armnn::TensorInfo info({3,1}, armnn::DataType::Float32);
    input0->GetOutputSlot(0).SetTensorInfo(info);
    input1->GetOutputSlot(0).SetTensorInfo(info);
    merge->GetOutputSlot(0).SetTensorInfo(info);

    Test testMerge;
    network->ExecuteStrategy(testMerge);

    CHECK(testMerge.m_Visited == true);
}
536
Sadik Armagan1625efc2021-06-10 18:24:34 +0100537TEST_CASE("StandInLayerNetworkTest")
Colm Donelana8769822019-10-23 14:18:56 +0100538{
539 // Create a simple network with a StandIn some place in it.
Francis Murtagh3d2b4b22021-02-15 18:23:17 +0000540 armnn::NetworkImpl net;
Colm Donelana8769822019-10-23 14:18:56 +0100541 auto input = net.AddInputLayer(0);
542
543 // Add some valid layer.
544 auto floor = net.AddFloorLayer("Floor");
545
546 // Add a standin layer
547 armnn::StandInDescriptor standInDescriptor;
548 standInDescriptor.m_NumInputs = 1;
549 standInDescriptor.m_NumOutputs = 1;
550 auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
551
552 // Finally the output.
553 auto output = net.AddOutputLayer(0);
554
555 // Connect up the layers
556 input->GetOutputSlot(0).Connect(floor->GetInputSlot(0));
557
558 floor->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
559
560 standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0));
561
562 // Check that the layer is there.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100563 CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
Colm Donelana8769822019-10-23 14:18:56 +0100564 // Check that it is connected as expected.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100565 CHECK(input->GetOutputSlot(0).GetConnection(0) == &floor->GetInputSlot(0));
566 CHECK(floor->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
567 CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output->GetInputSlot(0));
Colm Donelana8769822019-10-23 14:18:56 +0100568}
569
Sadik Armagan1625efc2021-06-10 18:24:34 +0100570TEST_CASE("StandInLayerSingleInputMultipleOutputsNetworkTest")
Colm Donelana8769822019-10-23 14:18:56 +0100571{
572 // Another test with one input and two outputs on the StandIn layer.
Francis Murtagh3d2b4b22021-02-15 18:23:17 +0000573 armnn::NetworkImpl net;
Colm Donelana8769822019-10-23 14:18:56 +0100574
575 // Create the input.
576 auto input = net.AddInputLayer(0);
577
578 // Add a standin layer
579 armnn::StandInDescriptor standInDescriptor;
580 standInDescriptor.m_NumInputs = 1;
581 standInDescriptor.m_NumOutputs = 2;
582 auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
583
584 // Add two outputs.
585 auto output0 = net.AddOutputLayer(0);
586 auto output1 = net.AddOutputLayer(1);
587
588 // Connect up the layers
589 input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
590
591 // Connect the two outputs of the Standin to the two outputs.
592 standIn->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
593 standIn->GetOutputSlot(1).Connect(output1->GetInputSlot(0));
594
595 // Check that the layer is there.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100596 CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
Colm Donelana8769822019-10-23 14:18:56 +0100597 // Check that it is connected as expected.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100598 CHECK(input->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
599 CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output0->GetInputSlot(0));
600 CHECK(standIn->GetOutputSlot(1).GetConnection(0) == &output1->GetInputSlot(0));
Colm Donelana8769822019-10-23 14:18:56 +0100601}
602
Jim Flynne4665962022-01-31 16:08:53 +0000603TEST_CASE("ObtainConv2DDescriptorFromIConnectableLayer")
604{
605 armnn::NetworkImpl net;
606
607 unsigned int dims[] = { 10,1,1,1 };
608 std::vector<float> convWeightsData(10);
609 armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32, 0.0f, 0, true), convWeightsData);
610
611 armnn::Convolution2dDescriptor convDesc2d;
612 convDesc2d.m_PadLeft = 2;
613 convDesc2d.m_PadRight = 3;
614 convDesc2d.m_PadTop = 4;
615 convDesc2d.m_PadBottom = 5;
616 convDesc2d.m_StrideX = 2;
617 convDesc2d.m_StrideY = 1;
618 convDesc2d.m_DilationX = 3;
619 convDesc2d.m_DilationY = 3;
620 convDesc2d.m_BiasEnabled = false;
621 convDesc2d.m_DataLayout = armnn::DataLayout::NCHW;
622 armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d,
623 weights,
624 armnn::EmptyOptional(),
625 "conv layer");
626 CHECK(convLayer);
627
628 const armnn::BaseDescriptor& descriptor = convLayer->GetParameters();
629 CHECK(descriptor.IsNull() == false);
630 const armnn::Convolution2dDescriptor& originalDescriptor =
631 static_cast<const armnn::Convolution2dDescriptor&>(descriptor);
632 CHECK(originalDescriptor.m_PadLeft == 2);
633 CHECK(originalDescriptor.m_PadRight == 3);
634 CHECK(originalDescriptor.m_PadTop == 4);
635 CHECK(originalDescriptor.m_PadBottom == 5);
636 CHECK(originalDescriptor.m_StrideX == 2);
637 CHECK(originalDescriptor.m_StrideY == 1);
638 CHECK(originalDescriptor.m_DilationX == 3);
639 CHECK(originalDescriptor.m_DilationY == 3);
640 CHECK(originalDescriptor.m_BiasEnabled == false);
641 CHECK(originalDescriptor.m_DataLayout == armnn::DataLayout::NCHW);
642}
643
644TEST_CASE("CheckNullDescriptor")
645{
646 armnn::NetworkImpl net;
647 armnn::IConnectableLayer* const addLayer = net.AddAdditionLayer();
648
649 CHECK(addLayer);
650
651 const armnn::BaseDescriptor& descriptor = addLayer->GetParameters();
652 // additional layer has no descriptor so a NullDescriptor will be returned
653 CHECK(descriptor.IsNull() == true);
654}
655
Sadik Armagan1625efc2021-06-10 18:24:34 +0100656}