telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2017 Arm Ltd. All rights reserved. |
David Beck | ecb56cd | 2018-09-05 12:52:57 +0100 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 4 | // |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 5 | |
| 6 | #include "GraphUtils.hpp" |
| 7 | |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 8 | #include <armnn/ArmNN.hpp> |
Derek Lamberti | a9cca6a | 2019-03-25 15:41:58 +0000 | [diff] [blame] | 9 | #include <armnn/LayerVisitorBase.hpp> |
Aron Virginas-Tar | c9cc804 | 2018-11-01 16:15:57 +0000 | [diff] [blame] | 10 | #include <Network.hpp> |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 11 | |
| 12 | #include <boost/test/unit_test.hpp> |
| 13 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 14 | namespace |
| 15 | { |
| 16 | |
| 17 | bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer) |
| 18 | { |
| 19 | bool allConnected = true; |
| 20 | for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i) |
| 21 | { |
| 22 | const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr; |
| 23 | allConnected &= inputConnected; |
| 24 | } |
| 25 | return allConnected; |
| 26 | } |
| 27 | |
| 28 | } |
| 29 | |
| 30 | BOOST_AUTO_TEST_SUITE(Network) |
| 31 | |
| 32 | BOOST_AUTO_TEST_CASE(LayerGuids) |
| 33 | { |
| 34 | armnn::Network net; |
| 35 | armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid(); |
| 36 | armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid(); |
| 37 | armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid(); |
| 38 | |
| 39 | BOOST_TEST(inputId != addId); |
| 40 | BOOST_TEST(addId != outputId); |
| 41 | BOOST_TEST(inputId != outputId); |
| 42 | } |
| 43 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 44 | BOOST_AUTO_TEST_CASE(NetworkBasic) |
| 45 | { |
| 46 | armnn::Network net; |
| 47 | BOOST_TEST(net.PrintGraph() == armnn::Status::Success); |
| 48 | } |
| 49 | |
| 50 | BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForINetwork) |
| 51 | { |
| 52 | armnn::Network net; |
| 53 | armnn::INetwork& inet = net; |
| 54 | inet.AddInputLayer(0); |
| 55 | inet.AddAdditionLayer(); |
| 56 | inet.AddActivationLayer(armnn::ActivationDescriptor()); |
| 57 | inet.AddOutputLayer(0); |
| 58 | } |
| 59 | |
| 60 | BOOST_AUTO_TEST_CASE(LayerNamesAreOptionalForNetwork) |
| 61 | { |
| 62 | armnn::Network net; |
| 63 | net.AddInputLayer(0); |
| 64 | net.AddAdditionLayer(); |
| 65 | net.AddActivationLayer(armnn::ActivationDescriptor()); |
| 66 | net.AddOutputLayer(0); |
| 67 | } |
| 68 | |
BOOST_AUTO_TEST_CASE(NetworkModification)
{
    // Builds a linear chain containing one layer of each common type
    // (input -> conv -> fully-connected -> pooling -> activation ->
    //  normalization -> softmax -> batch-norm -> addition -> multiplication
    //  -> output), then verifies layer presence by name and checks every
    // slot connection.
    armnn::Network net;

    armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
    BOOST_TEST(inputLayer);

    // Dummy weights: only the shape/type matter for graph construction,
    // the values themselves are never used by this test.
    unsigned int dims[] = { 10,1,1,1 };
    std::vector<float> convWeightsData(10);
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32), convWeightsData);

    armnn::Convolution2dDescriptor convDesc2d;
    armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d,
                                                                          weights,
                                                                          armnn::EmptyOptional(),
                                                                          "conv layer");
    BOOST_TEST(convLayer);

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));

    armnn::FullyConnectedDescriptor fullyConnectedDesc;
    armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
                                                                                     weights,
                                                                                     armnn::EmptyOptional(),
                                                                                     "fully connected");
    BOOST_TEST(fullyConnectedLayer);

    convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));

    armnn::Pooling2dDescriptor pooling2dDesc;
    armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
    BOOST_TEST(poolingLayer);

    fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));

    armnn::ActivationDescriptor activationDesc;
    armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
    BOOST_TEST(activationLayer);

    poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));

    armnn::NormalizationDescriptor normalizationDesc;
    armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
    BOOST_TEST(normalizationLayer);

    activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));

    armnn::SoftmaxDescriptor softmaxDesc;
    armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
    BOOST_TEST(softmaxLayer);

    normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

    armnn::BatchNormalizationDescriptor batchNormDesc;

    // A deliberately minimal (single-element) tensor is reused for all four
    // batch-norm parameters — only graph wiring is under test here, not
    // numerical validity (hence the name "invalidTensor").
    armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32);
    std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
    armnn::ConstTensor invalidTensor(tensorInfo, data);

    armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             "batch norm");
    BOOST_TEST(batchNormalizationLayer);

    softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));

    // The addition layer takes the batch-norm output on both of its inputs.
    armnn::IConnectableLayer* const additionLayer = net.AddAdditionLayer("addition");
    BOOST_TEST(additionLayer);

    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));

    // Likewise the multiplication layer takes the addition output twice.
    armnn::IConnectableLayer* const multiplicationLayer = net.AddMultiplicationLayer("multiplication");
    BOOST_TEST(multiplicationLayer);

    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
    BOOST_TEST(outputLayer);

    multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    //Tests that all layers are present in the graph.
    BOOST_TEST(net.GetGraph().GetNumLayers() == 11);

    //Tests that the vertices exist and have correct names.
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "input layer"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "activation"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "normalization"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "softmax"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "addition"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
    BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "output layer"));

    // Checks a single src->tgt edge: slot counts on both layers plus the
    // forward and backward pointers of the connection itself.
    auto checkOneOutputToOneInputConnection = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs = 1,
         int expectedDstNumOutputs = 1)
        {
            BOOST_TEST(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
            BOOST_TEST(srcLayer->GetNumOutputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumInputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

            BOOST_TEST(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
            BOOST_TEST(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
            BOOST_TEST(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
        };
    // Variant for the fan-out case: one src output feeding both tgt inputs.
    auto checkOneOutputToTwoInputsConnections = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs,
         int expectedDstNumOutputs = 1)
        {
            BOOST_TEST(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
            BOOST_TEST(srcLayer->GetNumOutputSlots() == 1);
            BOOST_TEST(tgtLayer->GetNumInputSlots() == 2);
            BOOST_TEST(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

            BOOST_TEST(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
            for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
            {
                BOOST_TEST(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
                BOOST_TEST(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
            }
        };

    BOOST_TEST(AreAllLayerInputSlotsConnected(*convLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*poolingLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*activationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*normalizationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*softmaxLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*additionLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*multiplicationLayer));
    BOOST_TEST(AreAllLayerInputSlotsConnected(*outputLayer));

    // Checks connectivity.
    checkOneOutputToOneInputConnection(inputLayer, convLayer, 0);
    checkOneOutputToOneInputConnection(convLayer, fullyConnectedLayer);
    checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer);
    checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
    checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
    checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
    checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
    checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
    checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
    checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}
| 228 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 229 | BOOST_AUTO_TEST_CASE(NetworkModification_SplitterConcat) |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 230 | { |
| 231 | armnn::Network net; |
| 232 | |
| 233 | // Adds an input layer and an input tensor descriptor. |
| 234 | armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer"); |
| 235 | BOOST_TEST(inputLayer); |
| 236 | |
| 237 | // Adds a splitter layer. |
| 238 | armnn::ViewsDescriptor splitterDesc(2,4); |
| 239 | |
| 240 | armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer"); |
| 241 | BOOST_TEST(splitterLayer); |
| 242 | |
| 243 | inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0)); |
| 244 | |
| 245 | // Adds a softmax layer 1. |
| 246 | armnn::SoftmaxDescriptor softmaxDescriptor; |
| 247 | armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1"); |
| 248 | BOOST_TEST(softmaxLayer1); |
| 249 | |
| 250 | splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0)); |
| 251 | |
| 252 | // Adds a softmax layer 2. |
| 253 | armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2"); |
| 254 | BOOST_TEST(softmaxLayer2); |
| 255 | |
| 256 | splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0)); |
| 257 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 258 | // Adds a concat layer. |
| 259 | armnn::OriginsDescriptor concatDesc(2, 4); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 260 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 261 | armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer"); |
| 262 | BOOST_TEST(concatLayer); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 263 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 264 | softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0)); |
| 265 | softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1)); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 266 | |
| 267 | // Adds an output layer. |
| 268 | armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer"); |
| 269 | BOOST_TEST(outputLayer); |
| 270 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 271 | concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0)); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 272 | |
| 273 | BOOST_TEST(splitterLayer->GetNumOutputSlots() == 2); |
| 274 | BOOST_TEST(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0)); |
| 275 | BOOST_TEST(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection()); |
| 276 | BOOST_TEST(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0)); |
| 277 | BOOST_TEST(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection()); |
| 278 | |
Jim Flynn | e242f2d | 2019-05-22 14:24:13 +0100 | [diff] [blame] | 279 | BOOST_TEST(concatLayer->GetNumInputSlots() == 2); |
| 280 | BOOST_TEST(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0)); |
| 281 | BOOST_TEST(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection()); |
| 282 | BOOST_TEST(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1)); |
| 283 | BOOST_TEST(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection()); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 284 | } |
| 285 | |
| 286 | BOOST_AUTO_TEST_CASE(NetworkModification_SplitterAddition) |
| 287 | { |
| 288 | armnn::Network net; |
| 289 | |
| 290 | // Adds an input layer and an input tensor descriptor. |
| 291 | armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer"); |
| 292 | BOOST_TEST(layer); |
| 293 | |
| 294 | // Adds a splitter layer. |
| 295 | armnn::ViewsDescriptor splitterDesc(2,4); |
| 296 | |
| 297 | armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer"); |
| 298 | BOOST_TEST(splitterLayer); |
| 299 | |
| 300 | layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0)); |
| 301 | |
| 302 | // Adds a softmax layer 1. |
| 303 | armnn::SoftmaxDescriptor softmaxDescriptor; |
| 304 | armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1"); |
| 305 | BOOST_TEST(softmax1Layer); |
| 306 | |
| 307 | splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0)); |
| 308 | |
| 309 | // Adds a softmax layer 2. |
| 310 | armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2"); |
| 311 | BOOST_TEST(softmax2Layer); |
| 312 | |
| 313 | splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0)); |
| 314 | |
| 315 | // Adds addition layer. |
| 316 | layer = net.AddAdditionLayer("add layer"); |
| 317 | BOOST_TEST(layer); |
| 318 | |
| 319 | softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 320 | softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); |
| 321 | |
| 322 | // Adds an output layer. |
| 323 | armnn::IConnectableLayer* prevLayer = layer; |
| 324 | layer = net.AddOutputLayer(0, "output layer"); |
| 325 | |
| 326 | prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 327 | |
| 328 | BOOST_TEST(layer); |
| 329 | } |
| 330 | |
| 331 | BOOST_AUTO_TEST_CASE(NetworkModification_SplitterMultiplication) |
| 332 | { |
| 333 | armnn::Network net; |
| 334 | |
| 335 | // Adds an input layer and an input tensor descriptor. |
| 336 | armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer"); |
| 337 | BOOST_TEST(layer); |
| 338 | |
| 339 | // Adds a splitter layer. |
| 340 | armnn::ViewsDescriptor splitterDesc(2,4); |
| 341 | armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer"); |
| 342 | BOOST_TEST(splitterLayer); |
| 343 | |
| 344 | layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0)); |
| 345 | |
| 346 | // Adds a softmax layer 1. |
| 347 | armnn::SoftmaxDescriptor softmaxDescriptor; |
| 348 | armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1"); |
| 349 | BOOST_TEST(softmax1Layer); |
| 350 | |
| 351 | splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0)); |
| 352 | |
| 353 | // Adds a softmax layer 2. |
| 354 | armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2"); |
| 355 | BOOST_TEST(softmax2Layer); |
| 356 | |
| 357 | splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0)); |
| 358 | |
| 359 | // Adds multiplication layer. |
| 360 | layer = net.AddMultiplicationLayer("multiplication layer"); |
| 361 | BOOST_TEST(layer); |
| 362 | |
| 363 | softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 364 | softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); |
| 365 | |
| 366 | // Adds an output layer. |
| 367 | armnn::IConnectableLayer* prevLayer = layer; |
| 368 | layer = net.AddOutputLayer(0, "output layer"); |
| 369 | BOOST_TEST(layer); |
| 370 | |
| 371 | prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 372 | } |
| 373 | |
Derek Lamberti | a9cca6a | 2019-03-25 15:41:58 +0000 | [diff] [blame] | 374 | BOOST_AUTO_TEST_CASE(Network_AddQuantize) |
| 375 | { |
| 376 | struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> |
| 377 | { |
| 378 | void VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name) override |
| 379 | { |
| 380 | m_Visited = true; |
| 381 | |
| 382 | BOOST_TEST(layer); |
| 383 | |
| 384 | std::string expectedName = std::string("quantize"); |
| 385 | BOOST_TEST(std::string(layer->GetName()) == expectedName); |
| 386 | BOOST_TEST(std::string(name) == expectedName); |
| 387 | |
| 388 | BOOST_TEST(layer->GetNumInputSlots() == 1); |
| 389 | BOOST_TEST(layer->GetNumOutputSlots() == 1); |
| 390 | |
| 391 | const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); |
| 392 | BOOST_TEST((infoIn.GetDataType() == armnn::DataType::Float32)); |
| 393 | |
| 394 | const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo(); |
Derek Lamberti | f90c56d | 2020-01-10 17:14:08 +0000 | [diff] [blame^] | 395 | BOOST_TEST((infoOut.GetDataType() == armnn::DataType::QAsymmU8)); |
Derek Lamberti | a9cca6a | 2019-03-25 15:41:58 +0000 | [diff] [blame] | 396 | } |
| 397 | |
| 398 | bool m_Visited = false; |
| 399 | }; |
| 400 | |
| 401 | |
| 402 | auto graph = armnn::INetwork::Create(); |
| 403 | |
| 404 | auto input = graph->AddInputLayer(0, "input"); |
| 405 | auto quantize = graph->AddQuantizeLayer("quantize"); |
| 406 | auto output = graph->AddOutputLayer(1, "output"); |
| 407 | |
| 408 | input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0)); |
| 409 | quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 410 | |
| 411 | armnn::TensorInfo infoIn({3,1}, armnn::DataType::Float32); |
| 412 | input->GetOutputSlot(0).SetTensorInfo(infoIn); |
| 413 | |
Derek Lamberti | f90c56d | 2020-01-10 17:14:08 +0000 | [diff] [blame^] | 414 | armnn::TensorInfo infoOut({3,1}, armnn::DataType::QAsymmU8); |
Derek Lamberti | a9cca6a | 2019-03-25 15:41:58 +0000 | [diff] [blame] | 415 | quantize->GetOutputSlot(0).SetTensorInfo(infoOut); |
| 416 | |
| 417 | Test testQuantize; |
| 418 | graph->Accept(testQuantize); |
| 419 | |
| 420 | BOOST_TEST(testQuantize.m_Visited == true); |
| 421 | |
| 422 | } |
| 423 | |
Nattapat Chaimanowong | 1f88630 | 2019-04-05 13:37:19 +0100 | [diff] [blame] | 424 | BOOST_AUTO_TEST_CASE(Network_AddMerge) |
| 425 | { |
| 426 | struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> |
| 427 | { |
| 428 | void VisitMergeLayer(const armnn::IConnectableLayer* layer, const char* name) override |
| 429 | { |
| 430 | m_Visited = true; |
| 431 | |
| 432 | BOOST_TEST(layer); |
| 433 | |
| 434 | std::string expectedName = std::string("merge"); |
| 435 | BOOST_TEST(std::string(layer->GetName()) == expectedName); |
| 436 | BOOST_TEST(std::string(name) == expectedName); |
| 437 | |
| 438 | BOOST_TEST(layer->GetNumInputSlots() == 2); |
| 439 | BOOST_TEST(layer->GetNumOutputSlots() == 1); |
| 440 | |
| 441 | const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); |
| 442 | BOOST_TEST((infoIn0.GetDataType() == armnn::DataType::Float32)); |
| 443 | |
| 444 | const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo(); |
| 445 | BOOST_TEST((infoIn1.GetDataType() == armnn::DataType::Float32)); |
| 446 | |
| 447 | const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo(); |
| 448 | BOOST_TEST((infoOut.GetDataType() == armnn::DataType::Float32)); |
| 449 | } |
| 450 | |
| 451 | bool m_Visited = false; |
| 452 | }; |
| 453 | |
| 454 | armnn::INetworkPtr network = armnn::INetwork::Create(); |
| 455 | |
| 456 | armnn::IConnectableLayer* input0 = network->AddInputLayer(0); |
| 457 | armnn::IConnectableLayer* input1 = network->AddInputLayer(1); |
| 458 | armnn::IConnectableLayer* merge = network->AddMergeLayer("merge"); |
| 459 | armnn::IConnectableLayer* output = network->AddOutputLayer(0); |
| 460 | |
| 461 | input0->GetOutputSlot(0).Connect(merge->GetInputSlot(0)); |
| 462 | input1->GetOutputSlot(0).Connect(merge->GetInputSlot(1)); |
| 463 | merge->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 464 | |
| 465 | const armnn::TensorInfo info({3,1}, armnn::DataType::Float32); |
| 466 | input0->GetOutputSlot(0).SetTensorInfo(info); |
| 467 | input1->GetOutputSlot(0).SetTensorInfo(info); |
| 468 | merge->GetOutputSlot(0).SetTensorInfo(info); |
| 469 | |
| 470 | Test testMerge; |
| 471 | network->Accept(testMerge); |
| 472 | |
| 473 | BOOST_TEST(testMerge.m_Visited == true); |
| 474 | } |
| 475 | |
Colm Donelan | a876982 | 2019-10-23 14:18:56 +0100 | [diff] [blame] | 476 | BOOST_AUTO_TEST_CASE(StandInLayerNetworkTest) |
| 477 | { |
| 478 | // Create a simple network with a StandIn some place in it. |
| 479 | armnn::Network net; |
| 480 | auto input = net.AddInputLayer(0); |
| 481 | |
| 482 | // Add some valid layer. |
| 483 | auto floor = net.AddFloorLayer("Floor"); |
| 484 | |
| 485 | // Add a standin layer |
| 486 | armnn::StandInDescriptor standInDescriptor; |
| 487 | standInDescriptor.m_NumInputs = 1; |
| 488 | standInDescriptor.m_NumOutputs = 1; |
| 489 | auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn"); |
| 490 | |
| 491 | // Finally the output. |
| 492 | auto output = net.AddOutputLayer(0); |
| 493 | |
| 494 | // Connect up the layers |
| 495 | input->GetOutputSlot(0).Connect(floor->GetInputSlot(0)); |
| 496 | |
| 497 | floor->GetOutputSlot(0).Connect(standIn->GetInputSlot(0)); |
| 498 | |
| 499 | standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 500 | |
| 501 | // Check that the layer is there. |
| 502 | BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "StandIn")); |
| 503 | // Check that it is connected as expected. |
| 504 | BOOST_TEST(input->GetOutputSlot(0).GetConnection(0) == &floor->GetInputSlot(0)); |
| 505 | BOOST_TEST(floor->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0)); |
| 506 | BOOST_TEST(standIn->GetOutputSlot(0).GetConnection(0) == &output->GetInputSlot(0)); |
| 507 | } |
| 508 | |
| 509 | BOOST_AUTO_TEST_CASE(StandInLayerSingleInputMultipleOutputsNetworkTest) |
| 510 | { |
| 511 | // Another test with one input and two outputs on the StandIn layer. |
| 512 | armnn::Network net; |
| 513 | |
| 514 | // Create the input. |
| 515 | auto input = net.AddInputLayer(0); |
| 516 | |
| 517 | // Add a standin layer |
| 518 | armnn::StandInDescriptor standInDescriptor; |
| 519 | standInDescriptor.m_NumInputs = 1; |
| 520 | standInDescriptor.m_NumOutputs = 2; |
| 521 | auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn"); |
| 522 | |
| 523 | // Add two outputs. |
| 524 | auto output0 = net.AddOutputLayer(0); |
| 525 | auto output1 = net.AddOutputLayer(1); |
| 526 | |
| 527 | // Connect up the layers |
| 528 | input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0)); |
| 529 | |
| 530 | // Connect the two outputs of the Standin to the two outputs. |
| 531 | standIn->GetOutputSlot(0).Connect(output0->GetInputSlot(0)); |
| 532 | standIn->GetOutputSlot(1).Connect(output1->GetInputSlot(0)); |
| 533 | |
| 534 | // Check that the layer is there. |
| 535 | BOOST_TEST(GraphHasNamedLayer(net.GetGraph(), "StandIn")); |
| 536 | // Check that it is connected as expected. |
| 537 | BOOST_TEST(input->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0)); |
| 538 | BOOST_TEST(standIn->GetOutputSlot(0).GetConnection(0) == &output0->GetInputSlot(0)); |
| 539 | BOOST_TEST(standIn->GetOutputSlot(1).GetConnection(0) == &output1->GetInputSlot(0)); |
| 540 | } |
| 541 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 542 | BOOST_AUTO_TEST_SUITE_END() |