//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <GraphUtils.hpp>

#include <Graph.hpp>
#include <Layer.hpp>

#include <armnn/TypesUtils.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <armnn/backends/IBackendInternal.hpp>

#include <armnn/backends/TensorHandle.hpp>
#include <backendsCommon/TensorHandleFactoryRegistry.hpp>

#include <doctest/doctest.h>

TEST_SUITE("Graph")
{
TEST_CASE("ClassGraph")
{
    armnn::Graph graph;
    CHECK_NOTHROW(graph.AddLayer<armnn::InputLayer>(0, "layerA"));
    CHECK(GraphHasNamedLayer(graph, "layerA"));
}

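// GetFirstLayerWithName and CheckOrder are helpers from GraphUtils.hpp. CheckOrder(graph, first, second)
// is expected to pass when 'first' appears before 'second' in the graph's topologically sorted layer order.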
TEST_CASE("TopologicalSort")
{
    armnn::Graph graph;

    armnn::ActivationDescriptor activationDefaults;

    CHECK_NOTHROW(graph.AddLayer<armnn::InputLayer>(0, "layerA"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerB"));
    CHECK_NOTHROW(graph.AddLayer<armnn::AdditionLayer>("layerC"));
    CHECK_NOTHROW(graph.AddLayer<armnn::OutputLayer>(0, "output"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerD"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerE"));

    armnn::Layer* const layerA = GetFirstLayerWithName(graph, "layerA");
    armnn::Layer* const layerB = GetFirstLayerWithName(graph, "layerB");
    armnn::Layer* const layerC = GetFirstLayerWithName(graph, "layerC");
    armnn::Layer* const layerO = GetFirstLayerWithName(graph, "output");
    armnn::Layer* const layerE = GetFirstLayerWithName(graph, "layerE");
    armnn::Layer* const layerD = GetFirstLayerWithName(graph, "layerD");

    // Simple graph which branches and rejoins.
    //    A
    //   / \'
    //  D   E
    //   \  |
    //    \ B
    //     \|
    //      C
    layerA->GetOutputSlot(0).Connect(layerD->GetInputSlot(0));
    layerA->GetOutputSlot(0).Connect(layerE->GetInputSlot(0));
    layerE->GetOutputSlot(0).Connect(layerB->GetInputSlot(0));
    layerD->GetOutputSlot(0).Connect(layerC->GetInputSlot(0));
    layerB->GetOutputSlot(0).Connect(layerC->GetInputSlot(1));
    layerC->GetOutputSlot(0).Connect(layerO->GetInputSlot(0));

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerD));
    CHECK(CheckOrder(graph, layerA, layerE));
    CHECK(CheckOrder(graph, layerD, layerC));
    CHECK(CheckOrder(graph, layerE, layerB));
    CHECK(CheckOrder(graph, layerB, layerC));
}

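// Verifies that Graph::InsertNewLayer(InputSlot&, ...) splices a new layer in front of the given
// input slot while keeping the graph's topological ordering valid.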
TEST_CASE("InsertNewLayerBefore")
{
    armnn::Graph graph;
    armnn::TensorInfo tensorInfo({ 1, 1, 1, 1 }, armnn::DataType::Float32);

    std::vector<armnn::Layer*> order;

    armnn::ActivationDescriptor activationDefaults;
    CHECK_NOTHROW(graph.AddLayer<armnn::InputLayer>(0, "layerA"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerB"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerC"));
    CHECK_NOTHROW(graph.AddLayer<armnn::AdditionLayer>("layerD"));
    CHECK_NOTHROW(graph.AddLayer<armnn::OutputLayer>(0, "output"));

    armnn::Layer* const layerA = GetFirstLayerWithName(graph, "layerA");
    armnn::Layer* const layerB = GetFirstLayerWithName(graph, "layerB");
    armnn::Layer* const layerC = GetFirstLayerWithName(graph, "layerC");
    armnn::Layer* const layerD = GetFirstLayerWithName(graph, "layerD");
    armnn::Layer* const layerO = GetFirstLayerWithName(graph, "output");

    //      A
    //     / \'
    //    B   C
    //     \ /
    //      D
    layerA->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerB->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerC->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerD->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    layerA->GetOutputSlot(0).Connect(layerB->GetInputSlot(0));
    layerA->GetOutputSlot(0).Connect(layerC->GetInputSlot(0));
    layerB->GetOutputSlot(0).Connect(layerD->GetInputSlot(0));
    layerC->GetOutputSlot(0).Connect(layerD->GetInputSlot(1));
    layerD->GetOutputSlot(0).Connect(layerO->GetInputSlot(0));

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerB));
    CHECK(CheckOrder(graph, layerA, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerD));

    //      A
    //     / \'
    //    B   C
    //     \  |
    //      \ E
    //       \|
    //        D
    CHECK_NOTHROW(graph.InsertNewLayer<armnn::ActivationLayer>(layerD->GetInputSlot(1),
                                                               activationDefaults,
                                                               "layerE"));

    armnn::Layer* const layerE = GetFirstLayerWithName(graph, "layerE");

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerB));
    CHECK(CheckOrder(graph, layerA, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerE));
    CHECK(CheckOrder(graph, layerE, layerD));

    //        A
    //       /|
    //      / F
    //     /  |
    //    B   C
    //     \  |
    //      \ E
    //       \|
    //        D
    CHECK_NOTHROW(graph.InsertNewLayer<armnn::ActivationLayer>(layerC->GetInputSlot(0),
                                                               activationDefaults,
                                                               "layerF"));

    armnn::Layer* const layerF = GetFirstLayerWithName(graph, "layerF");

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerB));
    CHECK(CheckOrder(graph, layerA, layerF));
    CHECK(CheckOrder(graph, layerF, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerE));
    CHECK(CheckOrder(graph, layerE, layerD));
}

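// Verifies that Graph::InsertNewLayer(OutputSlot&, ...) splices a new layer after the given output
// slot, so every existing consumer of that slot is re-routed through the new layer.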
TEST_CASE("InsertNewLayerAfter")
{
    armnn::Graph graph;
    armnn::TensorInfo tensorInfo({ 1, 1, 1, 1 }, armnn::DataType::Float32);

    std::vector<armnn::Layer*> order;

    armnn::ActivationDescriptor activationDefaults;
    CHECK_NOTHROW(graph.AddLayer<armnn::InputLayer>(0, "layerA"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerB"));
    CHECK_NOTHROW(graph.AddLayer<armnn::ActivationLayer>(activationDefaults, "layerC"));
    CHECK_NOTHROW(graph.AddLayer<armnn::AdditionLayer>("layerD"));
    CHECK_NOTHROW(graph.AddLayer<armnn::OutputLayer>(0, "output"));

    armnn::Layer* const layerA = GetFirstLayerWithName(graph, "layerA");
    armnn::Layer* const layerB = GetFirstLayerWithName(graph, "layerB");
    armnn::Layer* const layerC = GetFirstLayerWithName(graph, "layerC");
    armnn::Layer* const layerD = GetFirstLayerWithName(graph, "layerD");
    armnn::Layer* const layerO = GetFirstLayerWithName(graph, "output");

    //      A
    //     / \'
    //    B   C
    //     \ /
    //      D
    layerA->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerB->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerC->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    layerD->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    layerA->GetOutputSlot(0).Connect(layerB->GetInputSlot(0));
    layerA->GetOutputSlot(0).Connect(layerC->GetInputSlot(0));
    layerB->GetOutputSlot(0).Connect(layerD->GetInputSlot(0));
    layerC->GetOutputSlot(0).Connect(layerD->GetInputSlot(1));
    layerD->GetOutputSlot(0).Connect(layerO->GetInputSlot(0));

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerB));
    CHECK(CheckOrder(graph, layerA, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerD));

    //      A
    //     / \'
    //    B   C
    //     \  |
    //      \ E
    //       \|
    //        D
    CHECK_NOTHROW(graph.InsertNewLayer<armnn::ActivationLayer>(layerC->GetOutputSlot(),
                                                               activationDefaults,
                                                               "layerE"));

    armnn::Layer* const layerE = GetFirstLayerWithName(graph, "layerE");

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerB));
    CHECK(CheckOrder(graph, layerA, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerE));
    CHECK(CheckOrder(graph, layerE, layerD));

    //       A
    //       |
    //       F
    //      / \'
    //     B   C
    //     \   |
    //      \  E
    //       \ /
    //        D
    CHECK_NOTHROW(graph.InsertNewLayer<armnn::ActivationLayer>(layerA->GetOutputSlot(),
                                                               activationDefaults,
                                                               "layerF"));

    armnn::Layer* const layerF = GetFirstLayerWithName(graph, "layerF");

    // Checks order is valid.
    CHECK(CheckOrder(graph, layerA, layerF));
    CHECK(CheckOrder(graph, layerF, layerB));
    CHECK(CheckOrder(graph, layerF, layerC));
    CHECK(CheckOrder(graph, layerB, layerD));
    CHECK(CheckOrder(graph, layerC, layerE));
    CHECK(CheckOrder(graph, layerE, layerD));
}

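// Helpers for the copy-layer tests below: a graph is flattened into a list of
// (source layer, destination layer) pairs so the original and modified graphs can be compared.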
namespace
{
    using Edge = std::pair<const armnn::Layer*, const armnn::Layer*>;
}

static std::vector<Edge> GetEdgeList(const armnn::Graph& graph)
{
    std::vector<Edge> edges;

    for (auto&& srcLayer: graph)
    {
        const unsigned int numOutputSlots = srcLayer->GetNumOutputSlots();
        for (unsigned int s = 0; s < numOutputSlots; ++s)
        {
            const armnn::IOutputSlot& outputSlot = srcLayer->GetOutputSlot(s);
            const unsigned int numConnections = outputSlot.GetNumConnections();
            for (unsigned int c = 0; c < numConnections; ++c)
            {
                auto inputSlot = armnn::PolymorphicDowncast<const armnn::InputSlot*>(outputSlot.GetConnection(c));
                edges.emplace_back(srcLayer, &inputSlot->GetOwningLayer());
            }
        }
    }

    return edges;
}

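// Validates a graph after AddCompatibilityLayers(): every original edge must either be preserved
// (same backend on both ends) or be replaced by a path through a copy layer (different backends),
// and no duplicate edges may be introduced.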
static void TestGraphAfterAddingCopyLayers(const armnn::Graph& graph, const armnn::Graph& origGraph)
{
    std::vector<Edge> origEdges = GetEdgeList(origGraph);
    std::vector<Edge> newEdges = GetEdgeList(graph);

    // Adding copy layers should not produce any duplicate edges.
    {
        std::vector<Edge> sortedNewEdges = newEdges;
        std::sort(sortedNewEdges.begin(), sortedNewEdges.end());

        auto last = std::unique(sortedNewEdges.begin(), sortedNewEdges.end());
        CHECK_MESSAGE(last == sortedNewEdges.end(), "New graph contains duplicate edges!");
    }

    // Each new edge must be tested.
    while (!newEdges.empty())
    {
        const Edge edge = std::move(newEdges.back());
        newEdges.pop_back();

        // Is the edge present in the original graph?
        int originalEdge = -1;
        for (unsigned int i = 0; i < origEdges.size(); i++)
        {
            const Edge& origEdge = origEdges[i];
            if (origEdge.first->GetNameStr() == edge.first->GetNameStr() &&
                origEdge.second->GetNameStr() == edge.second->GetNameStr())
            {
                originalEdge = armnn::numeric_cast<int>(i);
            }
        }

        if (originalEdge != -1)
        {
            // Each vertex should correspond to a layer.
            const armnn::Layer* srcLayer = edge.first;
            const armnn::Layer* dstLayer = edge.second;
            CHECK(srcLayer);
            CHECK(dstLayer);

            // Both layers must have the same compute device.
            if (srcLayer && dstLayer)
            {
                CHECK((srcLayer->GetBackendId() == dstLayer->GetBackendId()));
            }

            // Marks the edge in the original graph as observed (by deleting it).
            origEdges.erase(origEdges.begin() + originalEdge);
        }
        else
        {
            // The edge did not exist in the original graph.
            // It must then be an edge connecting a layer and a copy layer.
            const armnn::Layer* srcLayer = edge.first;
            const armnn::Layer* dstLayer = edge.second;

            if (srcLayer == nullptr || dstLayer == nullptr)
            {
                FAIL("At least one of the two ends of a new edge (" << edge.first << ", " << edge.second
                     << ") introduced after adding copy layers to a graph "
                        "corresponds to a layer not known to the graph");
                continue;
            }

            // One and only one of the two layers referenced by the edge should be present in the original graph.
            const bool srcLayerInOrigGraph = GraphHasNamedLayer(origGraph, srcLayer->GetNameStr());
            const bool dstLayerInOrigGraph = GraphHasNamedLayer(origGraph, dstLayer->GetNameStr());

            if (srcLayerInOrigGraph == dstLayerInOrigGraph)
            {
                FAIL("A new edge ("
                     << edge.first->GetName()
                     << ", "
                     << edge.second->GetName()
                     << ") introduced after adding copy "
                        "layers to a graph is invalid. One of the ends should be present in the original "
                        "graph and the other should not, but "
                     << (srcLayerInOrigGraph ? "both are" : "neither is"));
                continue;
            }

            const armnn::Layer* copyLayer    = srcLayerInOrigGraph ? dstLayer : srcLayer;
            const armnn::Layer* nonCopyLayer = srcLayerInOrigGraph ? srcLayer : dstLayer;

            // Finds all edges connecting the copy layer to other layers.
            std::vector<Edge> adjEdges;
            auto it = newEdges.begin();
            while (it != newEdges.end())
            {
                Edge& newEdge = *it;
                if (copyLayer == (srcLayerInOrigGraph ? newEdge.first : newEdge.second))
                {
                    adjEdges.push_back(newEdge);

                    // Since the adjacent edge is immediately tested below, there is no need to consider it afterwards.
                    it = newEdges.erase(it);
                }
                else
                {
                    it++;
                }
            }

            if (adjEdges.empty())
            {
                FAIL("An edge connecting a layer and a copy layer exists, (" << edge.first << ", " <<
                     edge.second << "), but no other edges connecting the copy layer '" << copyLayer->GetName()
                     << "' to other layers could be found");
                continue;
            }

            // Tests adjacent edges now.
            for (const Edge& adjEdge : adjEdges)
            {
                // The adjacent edge must connect the copy layer to another layer.
                const armnn::Layer* adjLayer = srcLayerInOrigGraph ? adjEdge.second : adjEdge.first;

                if (!adjLayer)
                {
                    FAIL("An edge (" << adjEdge.first << ", " << adjEdge.second << ") is adjacent to an "
                         "edge connecting a layer and a copy layer, (" << edge.first << ", " << edge.second <<
                         "), but the non-copy layer in the former does not correspond to a layer");
                    continue;
                }

                // Both layers must have different compute devices.
                CHECK((nonCopyLayer->GetBackendId() != adjLayer->GetBackendId()));

                // There must exist an edge connecting both layers directly in the original graph.
                {
                    const armnn::Layer* origEdgeSrc = srcLayerInOrigGraph ? nonCopyLayer : adjLayer;
                    const armnn::Layer* origEdgeDst = srcLayerInOrigGraph ? adjLayer : nonCopyLayer;

                    auto origEdgeIter = origEdges.begin();
                    for (; origEdgeIter != origEdges.end(); origEdgeIter++)
                    {
                        if (origEdgeIter->first->GetNameStr() == origEdgeSrc->GetNameStr() &&
                            origEdgeIter->second->GetNameStr() == origEdgeDst->GetNameStr())
                        {
                            break;
                        }
                    }

                    if (origEdgeIter != origEdges.end())
                    {
                        origEdges.erase(origEdgeIter);
                    }
                    else
                    {
                        FAIL("An edge (" << adjEdge.first << ", " << adjEdge.second << ") is adjacent to "
                             "an edge connecting a layer and a copy layer, (" << edge.first << ", " << edge.second <<
                             "), but there is no edge connecting the layers in the original graph");
                    }
                }
            }
        }
    }

    CHECK_MESSAGE(origEdges.empty(), "Not all of the edges in the original graph correspond to paths in the new graph");
}

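// Fixture that builds a small network spanning two backends (CpuRef and CpuAcc) with edge
// strategies pre-set, giving AddCompatibilityLayers() backend boundaries to insert copy layers at.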
struct CopyLayersFixture
{
    CopyLayersFixture()
    {
    }

    void InitialiseTestGraph()
    {
        using namespace armnn;
        using namespace std;

        Layer* const inputLayer = AddLayer<InputLayer>(0, "input");
        inputLayer->SetBackendId(Compute::CpuRef);

        Convolution2dDescriptor convolutionDefaults;
        Layer* const convLayer1 = AddLayer<Convolution2dLayer>(convolutionDefaults, "conv1");
        convLayer1->SetBackendId(Compute::CpuRef);

        inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));

        Layer* const convLayer2 = AddLayer<Convolution2dLayer>(convolutionDefaults, "conv2");
        convLayer2->SetBackendId(Compute::CpuAcc);

        convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));

        armnn::OriginsDescriptor concatDefaults(2);
        Layer* const concatLayer = AddLayer<ConcatLayer>(concatDefaults, "concat");
        concatLayer->SetBackendId(armnn::Compute::CpuRef);

        convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
        convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));

        armnn::ActivationDescriptor activationDefaults;
        Layer* const actLayer = AddLayer<ActivationLayer>(activationDefaults, "act");
        actLayer->SetBackendId(armnn::Compute::CpuRef);

        concatLayer->GetOutputSlot(0).Connect(actLayer->GetInputSlot(0));

        armnn::SoftmaxDescriptor softmaxDefaults;
        Layer* const softmaxLayer = AddLayer<SoftmaxLayer>(softmaxDefaults, "softmax");
        softmaxLayer->SetBackendId(armnn::Compute::CpuRef);

        actLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

        Layer* const outputLayer = AddLayer<OutputLayer>(0, "output");
        outputLayer->SetBackendId(armnn::Compute::CpuAcc);

        softmaxLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

        // Set the memory strategies - for this test they should be DirectCompatibility between layers
        // on the same backend, and CopyToTarget between layers on different backends.
        inputLayer->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::DirectCompatibility);
        convLayer1->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::CopyToTarget);
        convLayer1->GetOutputSlot(0).SetEdgeStrategy(1, EdgeStrategy::DirectCompatibility);
        convLayer2->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::CopyToTarget);
        concatLayer->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::DirectCompatibility);
        actLayer->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::DirectCompatibility);
        softmaxLayer->GetOutputSlot(0).SetEdgeStrategy(0, EdgeStrategy::CopyToTarget);
    }

    armnn::TensorInfo m_TensorDesc;
    armnn::Graph m_Graph;
    std::map<armnn::BackendId, std::unique_ptr<armnn::IBackendInternal>> m_Backends;
    armnn::TensorHandleFactoryRegistry m_FactoryRegistry;

private:

    template <typename LayerType, typename... Args>
    LayerType* AddLayer(Args&&... args)
    {
        LayerType* const layer = m_Graph.AddLayer<LayerType>(std::forward<Args>(args)...);

        for (auto slot = layer->BeginOutputSlots(); slot != layer->EndOutputSlots(); ++slot)
        {
            slot->SetTensorInfo(m_TensorDesc);
        }

        return layer;
    }
};

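// With the mixed CpuRef/CpuAcc graph above, AddCompatibilityLayers() is expected to insert copy
// layers only across backend boundaries; TestGraphAfterAddingCopyLayers checks exactly that.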
TEST_CASE_FIXTURE(CopyLayersFixture, "AddCopyLayers")
{
    InitialiseTestGraph();
    const armnn::Graph origGraph(m_Graph);
    m_Graph.AddCompatibilityLayers(m_Backends, m_FactoryRegistry);

    TestGraphAfterAddingCopyLayers(m_Graph, origGraph);
}

TEST_CASE_FIXTURE(CopyLayersFixture, "AddCopyLayersSeveralTimes")
{
    InitialiseTestGraph();
    m_Graph.AddCompatibilityLayers(m_Backends, m_FactoryRegistry);

    // Calling AddCompatibilityLayers() several times should not change the connections.
    const std::vector<Edge> edges = GetEdgeList(m_Graph);
    for (int i = 0; i < 4; ++i)
    {
        m_Graph.AddCompatibilityLayers(m_Backends, m_FactoryRegistry);
        const std::vector<Edge> otherEdges = GetEdgeList(m_Graph);
        CHECK((edges == otherEdges));
    }
}

TEST_CASE_FIXTURE(CopyLayersFixture, "CopyLayersAddedBetweenSameLayersHaveDifferentNames")
{
    armnn::Graph graph;

    armnn::InputLayer* const inputLayer = graph.AddLayer<armnn::InputLayer>(0, "input");
    inputLayer->SetBackendId(armnn::Compute::CpuRef);

    armnn::ViewsDescriptor splitterDesc(2);
    armnn::SplitterLayer* const splitterLayer = graph.AddLayer<armnn::SplitterLayer>(splitterDesc, "splitter");
    splitterLayer->SetBackendId(armnn::Compute::GpuAcc);

    armnn::AdditionLayer* const additionLayer = graph.AddLayer<armnn::AdditionLayer>("addition");
    additionLayer->SetBackendId(armnn::Compute::CpuRef);

    armnn::OutputLayer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "output");
    outputLayer->SetBackendId(armnn::Compute::CpuRef);

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(1).Connect(additionLayer->GetInputSlot(1));
    additionLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    inputLayer->GetOutputSlot(0).SetEdgeStrategy(0, armnn::EdgeStrategy::DirectCompatibility);
    splitterLayer->GetOutputSlot(0).SetEdgeStrategy(0, armnn::EdgeStrategy::CopyToTarget);
    splitterLayer->GetOutputSlot(1).SetEdgeStrategy(0, armnn::EdgeStrategy::CopyToTarget);
    additionLayer->GetOutputSlot(0).SetEdgeStrategy(0, armnn::EdgeStrategy::DirectCompatibility);

    graph.AddCompatibilityLayers(m_Backends, m_FactoryRegistry);

    std::vector<Edge> edges = GetEdgeList(graph);
    CHECK(edges.size() == 6u);
    std::sort(edges.begin(), edges.end());
    auto last = std::unique(edges.begin(), edges.end());
    CHECK_MESSAGE(last == edges.end(), "Found duplicated edges after AddCompatibilityLayers()");
}

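// Graph does not require layer names to be unique: two layers may both be called "layer", and the
// topological sort must still place the input layer before the output layer.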
TEST_CASE("DuplicateLayerNames")
{
    armnn::Graph graph;

    armnn::InputLayer* const inputLayer = graph.AddLayer<armnn::InputLayer>(0, "layer");
    inputLayer->SetBackendId(armnn::Compute::CpuRef);

    armnn::OutputLayer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "layer");
    outputLayer->SetBackendId(armnn::Compute::CpuRef);

    inputLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    auto it = graph.TopologicalSort().begin();
    CHECK(((*it)->GetType() == armnn::LayerType::Input));
    CHECK(((*std::next(it))->GetType() == armnn::LayerType::Output));
}

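// Copy-constructing a Graph is expected to share constant tensor storage (m_LayerOutput is held via
// std::shared_ptr<ScopedTensorHandle>) rather than deep-copy it, so the weight data read through
// sharedWeightPtr must remain valid after the source graph is destroyed.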
TEST_CASE("CheckGraphConstTensorSharing")
{
    armnn::Graph graph0;
    const float* sharedWeightPtr;

    {
        armnn::Graph graph1;

        armnn::ConstantLayer* const constantLayer = graph1.AddLayer<armnn::ConstantLayer>("ConstantLayer");

        float weight = 1.0f;
        armnn::ConstTensor constTensor({{ 1, 1 }, armnn::DataType::Float32, 0.0f, 0, true}, &weight);
        constantLayer->m_LayerOutput = std::make_shared<armnn::ScopedTensorHandle>(constTensor);

        // Point sharedWeightPtr at graph1's const tensor.
        sharedWeightPtr = constantLayer->m_LayerOutput->GetConstTensor<float>();

        graph0 = armnn::Graph(graph1);
        // graph1 goes out of scope
    }

    CHECK(*sharedWeightPtr == 1);
}

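// GetConstantTensorsByRef() should hand back references to the layer's own constant data rather
// than a copy, so reading through the returned handles sees the original byte value.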
TEST_CASE("IConnectableLayerConstantTensorsByRef")
{
    using namespace armnn;
    INetworkPtr net(INetwork::Create());

    std::vector<uint8_t> falseData = {3};
    ConstTensor falseTensor(TensorInfo({1}, DataType::Boolean, 0.0f, 0, true), falseData);
    IConnectableLayer* constLayer = net->AddConstantLayer(falseTensor, "const");
    constLayer->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 1, 1, 1}, DataType::Boolean));

    const TensorInfo& constInfo = constLayer->GetOutputSlot(0).GetTensorInfo();

    const void* weightData = constLayer->GetConstantTensorsByRef()[0].get()->GetConstTensor<void>();
    auto weightValue = reinterpret_cast<const uint8_t*>(weightData);
    CHECK(weightValue[0] == 3);
    TensorInfo weightsInfo = constInfo;
    ConstTensor weights(weightsInfo, weightData);
    DepthwiseConvolution2dDescriptor desc;

    const auto weightsLayer = net->AddConstantLayer(weights);

    const void* resultDataWeights = weightsLayer->GetConstantTensorsByRef()[0].get()->GetConstTensor<void>();
    auto resultValueWeights = reinterpret_cast<const uint8_t*>(resultDataWeights);
    CHECK(resultValueWeights[0] == 3);
}

}