//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <boost/test/unit_test.hpp>

#include <cl/ClBackend.hpp>
#include <neon/NeonBackend.hpp>

#include <Network.hpp>

#include <vector>
#include <string>

using namespace armnn;

BOOST_AUTO_TEST_SUITE(BackendsCompatibility)

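// Builds a linear graph whose layers alternate between the Neon and CL backends and
// checks that the CL tensor handle factory is selected for every output slot, that every
// edge uses DirectCompatibility, and that no MemCopy or MemImport layers are inserted.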
BOOST_AUTO_TEST_CASE(Neon_Cl_DirectCompatibility_Test)
{
    auto neonBackend = std::make_unique<NeonBackend>();
    auto clBackend = std::make_unique<ClBackend>();

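    // Register each backend's tensor handle factories so the strategy selection can choose between them.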
    TensorHandleFactoryRegistry registry;
    neonBackend->RegisterTensorHandleFactories(registry);
    clBackend->RegisterTensorHandleFactories(registry);

    const BackendId& neonBackendId = neonBackend->GetId();
    const BackendId& clBackendId = clBackend->GetId();

    BackendsMap backends;
    backends[neonBackendId] = std::move(neonBackend);
    backends[clBackendId] = std::move(clBackend);

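    // Build a graph whose layers alternate between the Neon and CL backends.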
    armnn::Graph graph;

    armnn::InputLayer* const inputLayer = graph.AddLayer<armnn::InputLayer>(0, "input");

    inputLayer->SetBackendId(neonBackendId);

    armnn::SoftmaxDescriptor smDesc;
    armnn::SoftmaxLayer* const softmaxLayer1 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax1");
    softmaxLayer1->SetBackendId(clBackendId);

    armnn::SoftmaxLayer* const softmaxLayer2 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax2");
    softmaxLayer2->SetBackendId(neonBackendId);

    armnn::SoftmaxLayer* const softmaxLayer3 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax3");
    softmaxLayer3->SetBackendId(clBackendId);

    armnn::SoftmaxLayer* const softmaxLayer4 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax4");
    softmaxLayer4->SetBackendId(neonBackendId);

    armnn::OutputLayer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "output");
    outputLayer->SetBackendId(clBackendId);

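    // Connect the layers into a single chain: input -> softmax1..4 -> output.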
    inputLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));
    softmaxLayer1->GetOutputSlot(0).Connect(softmaxLayer2->GetInputSlot(0));
    softmaxLayer2->GetOutputSlot(0).Connect(softmaxLayer3->GetInputSlot(0));
    softmaxLayer3->GetOutputSlot(0).Connect(softmaxLayer4->GetInputSlot(0));
    softmaxLayer4->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    graph.TopologicalSort();

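    // Select a tensor handle strategy for every connection; this is expected to
    // succeed without errors or warnings.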
    std::vector<std::string> errors;
    auto result = SelectTensorHandleStrategy(graph, backends, registry, errors);

    BOOST_TEST(result.m_Error == false);
    BOOST_TEST(result.m_Warning == false);

    OutputSlot& inputLayerOut = inputLayer->GetOutputSlot(0);
    OutputSlot& softmaxLayer1Out = softmaxLayer1->GetOutputSlot(0);
    OutputSlot& softmaxLayer2Out = softmaxLayer2->GetOutputSlot(0);
    OutputSlot& softmaxLayer3Out = softmaxLayer3->GetOutputSlot(0);
    OutputSlot& softmaxLayer4Out = softmaxLayer4->GetOutputSlot(0);

    // Check that the correct factory was selected
    BOOST_TEST(inputLayerOut.GetTensorHandleFactoryId() == "Arm/Cl/TensorHandleFactory");
    BOOST_TEST(softmaxLayer1Out.GetTensorHandleFactoryId() == "Arm/Cl/TensorHandleFactory");
    BOOST_TEST(softmaxLayer2Out.GetTensorHandleFactoryId() == "Arm/Cl/TensorHandleFactory");
    BOOST_TEST(softmaxLayer3Out.GetTensorHandleFactoryId() == "Arm/Cl/TensorHandleFactory");
    BOOST_TEST(softmaxLayer4Out.GetTensorHandleFactoryId() == "Arm/Cl/TensorHandleFactory");

    // Check that the correct strategy was selected
    BOOST_TEST((inputLayerOut.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer1Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer2Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer3Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer4Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));

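    // With DirectCompatibility selected on every edge, adding compatibility layers
    // should not insert any MemCopy or MemImport layers.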
    graph.AddCompatibilityLayers(backends, registry);

    // Test for copy layers
    int copyCount = 0;
    graph.ForEachLayer([&copyCount](Layer* layer)
    {
        if (layer->GetType() == LayerType::MemCopy)
        {
            copyCount++;
        }
    });
    BOOST_TEST(copyCount == 0);

    // Test for import layers
    int importCount = 0;
    graph.ForEachLayer([&importCount](Layer* layer)
    {
        if (layer->GetType() == LayerType::MemImport)
        {
            importCount++;
        }
    });
    BOOST_TEST(importCount == 0);
}

BOOST_AUTO_TEST_SUITE_END()