Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2017 Arm Ltd. All rights reserved. |
| 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
Sadik Armagan | a097d2a | 2021-11-24 15:47:28 +0000 | [diff] [blame] | 6 | #include <CommonTestUtils.hpp> |
Mike Kelly | 0d677db | 2021-06-27 22:39:21 +0100 | [diff] [blame] | 7 | |
Aron Virginas-Tar | c9cc804 | 2018-11-01 16:15:57 +0000 | [diff] [blame] | 8 | #include <Graph.hpp> |
| 9 | #include <Network.hpp> |
| 10 | |
| 11 | #include <reference/RefWorkloadFactory.hpp> |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 12 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 13 | #include <doctest/doctest.h> |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 14 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 15 | TEST_SUITE("OptimizedNetwork") |
| 16 | { |
| 17 | TEST_CASE("SerializeToDot") |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 18 | { |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 19 | // build up the structure of the network |
| 20 | armnn::INetworkPtr net(armnn::INetwork::Create()); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 21 | |
| 22 | //Defines layers. |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 23 | auto input = net->AddInputLayer(0); |
| 24 | auto add = net->AddAdditionLayer(); |
| 25 | auto output = net->AddOutputLayer(0); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 26 | |
| 27 | // Connects layers. |
| 28 | input->GetOutputSlot(0).Connect(add->GetInputSlot(0)); |
| 29 | input->GetOutputSlot(0).Connect(add->GetInputSlot(1)); |
| 30 | add->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 31 | |
| 32 | armnn::TensorShape shape({4}); |
| 33 | armnn::TensorInfo info(shape, armnn::DataType::Float32); |
| 34 | input->GetOutputSlot(0).SetTensorInfo(info); |
| 35 | add->GetOutputSlot(0).SetTensorInfo(info); |
| 36 | |
| 37 | armnn::IRuntime::CreationOptions options; |
| 38 | armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options)); |
| 39 | |
| 40 | std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef}; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 41 | armnn::IOptimizedNetworkPtr optimizedNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec()); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 42 | |
| 43 | std::ostringstream ss; |
| 44 | optimizedNet->SerializeToDot(ss); |
| 45 | |
| 46 | auto inputId = input->GetGuid(); |
| 47 | auto addId = add->GetGuid(); |
| 48 | auto outputId = output->GetGuid(); |
| 49 | |
| 50 | std::stringstream expected; |
| 51 | expected << |
| 52 | "digraph Optimized {\n" |
| 53 | " node [shape=\"record\"];\n" |
| 54 | " edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n" |
Rob Hughes | b17220d | 2020-08-28 11:48:35 +0100 | [diff] [blame] | 55 | " " << inputId << " [label=\"{Input|Guid : " << inputId << "\\lLayerType : Input\\l" |
| 56 | "BackendID : CpuRef\\l}\"];\n" |
| 57 | " " << addId << " [label=\"{Addition|Guid : " << addId << "\\lLayerType : Addition\\l" |
| 58 | "BackendID : CpuRef\\l}\"];\n" |
| 59 | " " << outputId << " [label=\"{Output|Guid : " << outputId << "\\lLayerType : Output\\l" |
| 60 | "BackendID : CpuRef\\l}\"];\n" |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 61 | " " << inputId << " -> " << addId << " [label=< [4] >];\n" |
| 62 | " " << inputId << " -> " << addId << " [label=< [4] >];\n" |
| 63 | " " << addId << " -> " << outputId << " [label=< [4] >];\n" |
| 64 | "}\n"; |
| 65 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 66 | CHECK(ss.str() == expected.str()); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 67 | } |
| 68 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 69 | TEST_CASE("OptimizeValidateDeviceNonSupportLayerNoFallback") |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 70 | { |
| 71 | // build up the structure of the network |
| 72 | armnn::INetworkPtr net(armnn::INetwork::Create()); |
| 73 | |
| 74 | armnn::IConnectableLayer* input = net->AddInputLayer(0); |
| 75 | |
| 76 | // This layer configuration isn't supported by CpuAcc and isn't allowed to fall back, so Optimize will return null. |
| 77 | armnn::NormalizationDescriptor descriptor; |
| 78 | armnn::IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor); |
| 79 | |
| 80 | armnn::IConnectableLayer* output = net->AddOutputLayer(0); |
| 81 | |
| 82 | input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0)); |
| 83 | normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 84 | |
| 85 | input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32)); |
| 86 | normalize->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32)); |
| 87 | |
| 88 | armnn::IRuntime::CreationOptions options; |
| 89 | armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options)); |
| 90 | |
| 91 | std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc }; |
Mike Kelly | 3a613cc | 2020-09-29 20:50:35 +0100 | [diff] [blame] | 92 | std::vector<std::string> errMessages; |
| 93 | |
| 94 | try |
| 95 | { |
| 96 | Optimize(*net, backends, runtime->GetDeviceSpec(), armnn::OptimizerOptions(), errMessages); |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 97 | FAIL("Should have thrown an exception."); |
Mike Kelly | 3a613cc | 2020-09-29 20:50:35 +0100 | [diff] [blame] | 98 | } |
Rob Hughes | c013bc8 | 2021-07-14 09:31:31 +0100 | [diff] [blame] | 99 | catch (const armnn::InvalidArgumentException&) |
Mike Kelly | 3a613cc | 2020-09-29 20:50:35 +0100 | [diff] [blame] | 100 | { |
| 101 | // Different exceptions are thrown on different backends |
| 102 | } |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 103 | CHECK(errMessages.size() > 0); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 104 | } |
| 105 | |
TEST_CASE("OptimizeValidateDeviceNonSupportLayerWithFallback")
{
    // Verifies that when an unsupported layer has a fallback backend available,
    // Optimize succeeds and assigns the fallback (CpuRef) to that layer.

    // build up the structure of the network
    armnn::INetworkPtr net(armnn::INetwork::Create());

    armnn::IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc but it allows to fallback to CpuRef.
    armnn::NormalizationDescriptor descriptor;
    armnn::IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    armnn::IConnectableLayer* output = net->AddOutputLayer(0);

    // Wire up: input -> normalization -> output.
    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32));

    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // CpuAcc is preferred; CpuRef acts as the fallback for unsupported layers.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
    armnn::IOptimizedNetworkPtr optNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec());
    REQUIRE(optNet);

    armnn::Graph& graph = GetGraphForTesting(optNet.get());
    graph.AllocateDynamicBuffers();

    for (auto&& layer : graph)
    {
        // If NEON is enabled, Input and Output layers are supported by CpuAcc,
        // the other layers are supported by CpuRef.
        // If NEON is not enabled, all layers are supported by CpuRef.
#if defined(ARMCOMPUTENEON_ENABLED)
        // NOTE(review): only Output and Normalization are asserted here; the
        // Input layer's backend is deliberately left unchecked in this branch.
        if (layer->GetType() == armnn::LayerType::Output)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuAcc);
        }
        else if (layer->GetType() == armnn::LayerType::Normalization)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
        }
#else
        CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
#endif
    }
}
| 154 | |
TEST_CASE("OptimizeValidateWorkloadsUndefinedComputeDevice")
{
    // Verifies that Optimize throws (and reports error messages) when the only
    // requested backend is Undefined and no fallback backend is listed.
    const armnn::TensorInfo desc({3, 5}, armnn::DataType::Float32);

    // build up the structure of the network
    armnn::INetworkPtr net(armnn::INetwork::Create());

    armnn::NormalizationDescriptor nmDesc;
    armnn::ActivationDescriptor acDesc;

    //  in
    //   |
    //  nm
    //  / |
    // ac |
    //  \ |
    //   ml
    //   |
    //   sm
    //   |
    //   ot
    armnn::IConnectableLayer* layer = net->AddInputLayer(0, "in");
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    armnn::IConnectableLayer* const normLayer = net->AddNormalizationLayer(nmDesc, "nm");

    layer->GetOutputSlot(0).Connect(normLayer->GetInputSlot(0));
    normLayer->GetOutputSlot(0).SetTensorInfo(desc);

    layer = net->AddActivationLayer(acDesc, "ac");

    normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    // Multiplication consumes the activation output (slot 0) and the
    // normalization output (slot 1), forming the diamond in the diagram above.
    armnn::IConnectableLayer* prevLayer = layer;
    layer = net->AddMultiplicationLayer("ml");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    prevLayer = layer;
    armnn::SoftmaxDescriptor softmaxDescriptor;
    layer = net->AddSoftmaxLayer(softmaxDescriptor, "sm");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    layer->GetOutputSlot(0).SetTensorInfo(desc);

    prevLayer = layer;
    layer = net->AddOutputLayer(0, "ot");

    prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));

    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Undefined is not a real compute device, so optimization must fail.
    std::vector<armnn::BackendId> backends = { armnn::Compute::Undefined };
    std::vector<std::string> errMessages;

    try
    {
        Optimize(*net, backends, runtime->GetDeviceSpec(), armnn::OptimizerOptions(), errMessages);
        FAIL("Should have thrown an exception.");
    }
    catch (const armnn::InvalidArgumentException&)
    {
        // Different exceptions are thrown on different backends
    }

    // Failure must be accompanied by at least one error message.
    CHECK(errMessages.size() > 0);
}
| 225 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 226 | TEST_CASE("OptimizeValidateWorkloadsUndefinedComputeDeviceWithFallback") |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 227 | { |
| 228 | const armnn::TensorInfo desc({3, 5}, armnn::DataType::Float32); |
| 229 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 230 | // build up the structure of the network |
| 231 | armnn::INetworkPtr net(armnn::INetwork::Create()); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 232 | |
| 233 | armnn::NormalizationDescriptor nmDesc; |
| 234 | armnn::ActivationDescriptor acDesc; |
| 235 | |
| 236 | // in |
| 237 | // | |
| 238 | // nm |
| 239 | // / | |
| 240 | // ac | |
| 241 | // \ | |
| 242 | // ml |
| 243 | // | |
| 244 | // sm |
| 245 | // | |
| 246 | // ot |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 247 | armnn::IConnectableLayer* layer = net->AddInputLayer(0, "in"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 248 | layer->GetOutputSlot(0).SetTensorInfo(desc); |
| 249 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 250 | armnn::IConnectableLayer* const normLayer = net->AddNormalizationLayer(nmDesc, "nm"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 251 | |
| 252 | layer->GetOutputSlot(0).Connect(normLayer->GetInputSlot(0)); |
| 253 | normLayer->GetOutputSlot(0).SetTensorInfo(desc); |
| 254 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 255 | layer = net->AddActivationLayer(acDesc, "ac"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 256 | |
| 257 | normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 258 | layer->GetOutputSlot(0).SetTensorInfo(desc); |
| 259 | |
| 260 | armnn::IConnectableLayer* prevLayer = layer; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 261 | layer = net->AddMultiplicationLayer("ml"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 262 | |
| 263 | prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 264 | normLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); |
| 265 | layer->GetOutputSlot(0).SetTensorInfo(desc); |
| 266 | |
| 267 | prevLayer = layer; |
| 268 | armnn::SoftmaxDescriptor softmaxDescriptor; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 269 | layer = net->AddSoftmaxLayer(softmaxDescriptor, "sm"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 270 | |
| 271 | prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 272 | layer->GetOutputSlot(0).SetTensorInfo(desc); |
| 273 | |
| 274 | prevLayer = layer; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 275 | layer = net->AddOutputLayer(0, "ot"); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 276 | |
| 277 | prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 278 | |
| 279 | armnn::IRuntime::CreationOptions options; |
| 280 | armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options)); |
| 281 | |
| 282 | std::vector<armnn::BackendId> backends = { armnn::Compute::Undefined, armnn::Compute::CpuRef }; |
| 283 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 284 | armnn::IOptimizedNetworkPtr optNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec()); |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 285 | CHECK(optNet); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 286 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 287 | armnn::Graph& graph = GetGraphForTesting(optNet.get()); |
| 288 | graph.AllocateDynamicBuffers(); |
| 289 | |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 290 | // validate workloads |
| 291 | armnn::RefWorkloadFactory fact; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 292 | for (auto&& layer : graph) |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 293 | { |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 294 | CHECK(layer->GetBackendId() == armnn::Compute::CpuRef); |
| 295 | CHECK_NOTHROW( |
Derek Lamberti | 94a88d2 | 2019-12-10 21:12:59 +0000 | [diff] [blame] | 296 | layer->CreateWorkload(fact)); |
Aron Virginas-Tar | 7010400 | 2018-10-24 15:33:28 +0100 | [diff] [blame] | 297 | } |
| 298 | } |
| 299 | |
TEST_CASE("OptimizeValidateWorkloadsDuplicateComputeDeviceWithFallback")
{
    // Verifies backend assignment when two accelerators (CpuAcc, GpuAcc) are
    // preferred ahead of the CpuRef fallback; the unsupported Normalization
    // layer must land on CpuRef in every build configuration.

    // build up the structure of the network
    armnn::INetworkPtr net(armnn::INetwork::Create());

    armnn::IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc but it allows to fallback to CpuRef.
    armnn::NormalizationDescriptor descriptor;
    armnn::IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    armnn::IConnectableLayer* output = net->AddOutputLayer(0);

    // Wire up: input -> normalization -> output.
    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32));

    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Preference order: CpuAcc first, then GpuAcc, then the CpuRef fallback.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc,
                                               armnn::Compute::GpuAcc,
                                               armnn::Compute::CpuRef };

    armnn::IOptimizedNetworkPtr optNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec());
    REQUIRE(optNet);

    armnn::Graph& graph = GetGraphForTesting(optNet.get());
    graph.AllocateDynamicBuffers();

    for (auto&& layer : graph)
    {
        // If NEON is enabled, Input and Output layers are supported by CpuAcc,
        // the other layers are supported by CpuRef.
        // If only CL is enabled, Input and Output layers are supported by GpuAcc,
        // the other layers are supported by CpuRef.
        // If neither NEON, nor CL is enabled, all layers are supported by CpuRef.
#if defined(ARMCOMPUTENEON_ENABLED)
        // NOTE(review): the checks below expect Input on CpuRef but Output on
        // the accelerator — an asymmetry the test deliberately pins down.
        if (layer->GetType() == armnn::LayerType::Input)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
        }
        else if (layer->GetType() == armnn::LayerType::Output)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuAcc);
        }
        else if (layer->GetType() == armnn::LayerType::Normalization)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
        }
#elif defined(ARMCOMPUTECL_ENABLED)
        if (layer->GetType() == armnn::LayerType::Input)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
        }
        else if (layer->GetType() == armnn::LayerType::Output)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::GpuAcc);
        }
        else if (layer->GetType() == armnn::LayerType::Normalization)
        {
            CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
        }
#else
        CHECK(layer->GetBackendId() == armnn::Compute::CpuRef);
#endif
    }
}
| 370 | |
TEST_CASE("OptimizeNetworkCopy")
{
    // Verifies that copies of an optimized network (made via the
    // IOptimizedNetwork copy constructor) can each be loaded and executed, and
    // produce results identical to the original optimized network.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
    std::vector<armnn::NetworkId> networkIds;

    const std::string layerName("convolution2d");
    const armnn::TensorInfo inputInfo ({ 1, 5, 5, 1 }, armnn::DataType::Float32);
    const armnn::TensorInfo outputInfo({ 1, 2, 2, 1 }, armnn::DataType::Float32);

    // Weights and biases are fed in through constant layers, so their infos
    // are created with the IsConstant flag set (last ctor argument = true).
    const armnn::TensorInfo weightsInfo({ 1, 3, 3, 1 }, armnn::DataType::Float32, 0.0f, 0, true);
    const armnn::TensorInfo biasesInfo ({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);

    std::vector<float> weightsData = GenerateRandomData<float>(weightsInfo.GetNumElements());
    armnn::ConstTensor weights(weightsInfo, weightsData);

    std::vector<float> biasesData = GenerateRandomData<float>(biasesInfo.GetNumElements());
    armnn::ConstTensor biases(biasesInfo, biasesData);

    // 3x3 NHWC convolution with padding 1, stride 2, dilation 2 and bias.
    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = 1;
    descriptor.m_PadRight = 1;
    descriptor.m_PadTop = 1;
    descriptor.m_PadBottom = 1;
    descriptor.m_StrideX = 2;
    descriptor.m_StrideY = 2;
    descriptor.m_DilationX = 2;
    descriptor.m_DilationY = 2;
    descriptor.m_BiasEnabled = true;
    descriptor.m_DataLayout = armnn::DataLayout::NHWC;

    armnn::INetworkPtr network = armnn::INetwork::Create();
    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);

    armnn::IConnectableLayer* const convLayer = network->AddConvolution2dLayer(descriptor, layerName.c_str());
    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
    armnn::IConnectableLayer* weightsLayer = network->AddConstantLayer(weights);
    armnn::IConnectableLayer* biasLayer = network->AddConstantLayer(biases);

    // Convolution input slots: 0 = data, 1 = weights, 2 = biases.
    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1u));

    biasLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2u));

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    std::vector<armnn::BackendId> preferredBackends { "CpuRef" };
    armnn::ModelOptions modelOptions;
    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions, false);
    std::vector<std::string> errorMessages;

    // optimize the network.
    armnn::IOptimizedNetworkPtr optNet = Optimize(*network,
                                                  preferredBackends,
                                                  runtime->GetDeviceSpec(),
                                                  optimizerOptions,
                                                  armnn::Optional<std::vector<std::string>&>(errorMessages));

    // Make two independent copies of the optimized network and load each one
    // into the runtime.
    for (unsigned int i = 0; i < 2; ++i)
    {
        armnn::ModelOptions optimizedModelOptions;
        auto copy = armnn::IOptimizedNetworkPtr(new armnn::IOptimizedNetwork(*optNet.get(), optimizedModelOptions),
                                                &armnn::IOptimizedNetwork::Destroy);

        CHECK(copy);

        armnn::NetworkId netId;
        std::string errorMessage;

        CHECK(armnn::Status::Success == runtime->LoadNetwork(netId, std::move(copy), errorMessage));

        // Record the networkID for the loaded network
        networkIds.emplace_back(netId);
    }
    armnn::NetworkId optNetId;
    std::string errorMessage;

    // Load the original optNet
    CHECK(armnn::Status::Success == runtime->LoadNetwork(optNetId, std::move(optNet), errorMessage));

    // Same random input is reused for the original and every copy.
    std::vector<float> inputData = GenerateRandomData<float>(runtime->GetInputTensorInfo(optNetId, 0).GetNumElements());
    std::vector<float> outputData(runtime->GetOutputTensorInfo(optNetId, 0).GetNumElements());

    // Input tensor infos must be flagged constant before wrapping in ConstTensor.
    armnn::TensorInfo inputTensorInfo = runtime->GetInputTensorInfo(optNetId, 0);
    inputTensorInfo.SetConstant(true);
    armnn::InputTensors inputTensors
    {
        {
            0, armnn::ConstTensor(inputTensorInfo, inputData.data())
        }
    };
    armnn::OutputTensors outputTensors
    {
        {
            0, armnn::Tensor(runtime->GetOutputTensorInfo(optNetId, 0), outputData.data())
        }
    };
    // Run the original network once to produce the reference output.
    runtime->EnqueueWorkload(optNetId, inputTensors, outputTensors);
    runtime->UnloadNetwork(optNetId);

    // Record the networkID for the loaded network
    for (unsigned int i = 0; i < networkIds.size(); ++i)
    {
        armnn::NetworkId netId = networkIds[i];
        std::vector<float> copyOutputData(runtime->GetOutputTensorInfo(netId, 0).GetNumElements());

        armnn::TensorInfo inputTensorInfo2 = runtime->GetInputTensorInfo(netId, 0);
        inputTensorInfo2.SetConstant(true);
        armnn::InputTensors copyInputTensors
        {
            {
                0, armnn::ConstTensor(inputTensorInfo2, inputData.data())
            }
        };
        armnn::OutputTensors copyOutputTensors
        {
            {
                0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), copyOutputData.data())
            }
        };
        runtime->EnqueueWorkload(netId, copyInputTensors, copyOutputTensors);
        runtime->UnloadNetwork(netId);

        // Check results are identical to "original" version
        for (unsigned int j = 0; j < outputData.size(); ++j)
        {
            CHECK(outputData[j] == copyOutputData[j]);
        }
    }
}
| 506 | |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 507 | } |