//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "CommonTestUtils.hpp"
#include "MockBackend.hpp"
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <armnn/BackendRegistry.hpp>

#include <boost/test/unit_test.hpp>

#include <unordered_map>

using namespace armnn;

namespace
{

// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots = 0;  // Expected number of input slots in the subgraph view
    size_t m_NumOutputSlots = 0; // Expected number of output slots in the subgraph view
    size_t m_NumLayers = 0;      // Expected number of layers in the subgraph view
};

// Keep the layers organized by layer name
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;

// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    // The const was introduced only by taking the argument by const reference;
    // subgraph views hold mutable slot pointers, so cast it away.
    const SlotType* slotPtr = &input;
    return const_cast<SlotType*>(slotPtr);
}

// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs), array version
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    output.reserve(input.size());

    // Strip the const introduced by the const-reference parameter on each element
    for (const SlotType& item : input)
    {
        output.push_back(const_cast<SlotType*>(&item));
    }

    return output;
}

60// Convenience function to add an input layer to a graph
61Layer* AddInputLayer(Graph& graph,
62 const std::string& layerName,
63 const TensorInfo& inputInfo,
64 LayerBindingId inputId = 0)
65{
66 Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
67 BOOST_TEST(inputLayer);
68 inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
69 return inputLayer;
70}
71
72// Convenience function to add an output layer to a graph
73Layer* AddOutputLayer(Graph& graph,
74 const std::string& layerName)
75{
76 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
77 BOOST_TEST(outputLayer);
78 return outputLayer;
79}
80
81// Convenience function to add a convolution layer to a graph
82Convolution2dLayer* AddConvolutionLayer(Graph& graph,
83 LayerNameToLayerMap& layersInGraph,
84 const Convolution2dDescriptor& convolutionDescriptor,
85 const std::string& layerName,
86 const TensorInfo& weightInfo,
87 const TensorInfo& biasInfo,
88 const TensorInfo& outputInfo)
89{
90 Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
91 BOOST_TEST(convLayer);
92 SetWeightAndBias(convLayer, weightInfo, biasInfo);
93 convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
94 layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
95 return convLayer;
96}
97
98// Convenience function to add a pooling layer to a graph
99Pooling2dLayer* AddPoolingLayer(Graph& graph,
100 LayerNameToLayerMap& layersInGraph,
101 const Pooling2dDescriptor& poolingDescriptor,
102 const std::string& layerName,
103 const TensorInfo& outputInfo)
104{
105 Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
106 BOOST_TEST(poolingLayer);
107 poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
108 layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
109 return poolingLayer;
110}
111
112// Convenience function to add an addition layer to a graph
113AdditionLayer* AddAdditionaLayer(Graph& graph,
114 LayerNameToLayerMap& layersInGraph,
115 const std::string& layerName,
116 const TensorInfo& outputInfo)
117{
118 AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
119 BOOST_TEST(additionLayer);
120 additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
121 layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
122 return additionLayer;
123}
124
125// Convenience function to check that the given substitution matches the specified expected values
126void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
127 const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
128 const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
129 const SubgraphView::InputSlots& expectedSubstitutableInputSlots,
130 const SubgraphView::OutputSlots& expectedSubstitutableOutputSlots,
131 const SubgraphView::Layers& expectedSubstitutableLayers)
132{
133 const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
134 const SubgraphView::InputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetInputSlots();
135 const SubgraphView::OutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetOutputSlots();
136 const SubgraphView::Layers& substitutableSubgraphLayers = substitutableSubgraph.GetLayers();
137
138 const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
139 const SubgraphView::InputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetInputSlots();
140 const SubgraphView::OutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetOutputSlots();
141 const SubgraphView::Layers& replacementSubgraphLayers = replacementSubgraph.GetLayers();
142
143 BOOST_TEST(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
144 BOOST_TEST(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
145 BOOST_TEST(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
146
147 BOOST_TEST(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
148 BOOST_TEST(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
149 BOOST_TEST(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
150
151 BOOST_TEST(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
152 BOOST_TEST(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
153 BOOST_TEST(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
154
155 BOOST_TEST(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
156 BOOST_TEST(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
157 BOOST_TEST(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
158
159 BOOST_TEST(std::all_of(replacementSubgraphLayers.begin(),
160 replacementSubgraphLayers.end(),
161 [](const Layer* layer)
162 {
163 return layer->GetType() == LayerType::PreCompiled;
164 }));
165}
166
167// Convenience function to check that the given failed subgraph matches the specified expected values
168void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
169 const ExpectedSubgraphSize& expectedFailedSubgraphSize,
170 const SubgraphView::InputSlots& expectedFailedInputSlots,
171 const SubgraphView::OutputSlots& expectedFailedOutputSlots,
172 const SubgraphView::Layers& expectedFailedLayers)
173{
174 const SubgraphView::InputSlots& failedSubgraphInputSlots = failedSubgraph.GetInputSlots();
175 const SubgraphView::OutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetOutputSlots();
176 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
177
178 BOOST_TEST(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
179 BOOST_TEST(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
180 BOOST_TEST(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
181
182 BOOST_TEST(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
183 BOOST_TEST(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
184 BOOST_TEST(AreEqual(failedSubgraphLayers, expectedFailedLayers));
185}
186
187// Convenience function to check that the given untouched subgraph matches the specified expected values
188void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
189 const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
190 const SubgraphView::InputSlots& expectedUntouchedInputSlots,
191 const SubgraphView::OutputSlots& expectedUntouchedOutputSlots,
192 const SubgraphView::Layers& expectedUntouchedLayers)
193{
194 const SubgraphView::InputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetInputSlots();
195 const SubgraphView::OutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetOutputSlots();
196 const SubgraphView::Layers& untouchedSubgraphLayers = untouchedSubgraph.GetLayers();
197
198 BOOST_TEST(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
199 BOOST_TEST(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
200 BOOST_TEST(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
201
202 BOOST_TEST(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
203 BOOST_TEST(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
204 BOOST_TEST(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
205}
206
// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit tensors; the shape is interpreted as NHWC (see m_DataLayout below)
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);

    // 2x2 average pooling, stride 2, 1-pixel padding on every side
    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> pooling -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                         "pooling layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
    poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
                                  CreateOutputsFrom({poolingLayer}),
                                  {poolingLayer});
}

// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit tensors; the shape is interpreted as NHWC (see m_DataLayout below)
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);

    // 2x2 average pooling, stride 2, 1-pixel padding on every side; shared by all three pooling layers
    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> pooling1 -> pooling2 -> pooling3 -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {pooling1Layer,
                                   pooling2Layer,
                                   pooling3Layer});
}

// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias (the usual pairing for asymm8 convolutions)
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> conv -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer});
}

// Creates a subgraph with five convolutions layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias; all five convolutions share these infos
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> conv1 -> conv2 -> conv3 -> conv4 -> conv5 -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({conv5Layer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   conv4Layer,
                                   conv5Layer});
}

// Creates a subgraph with both supported and unsupported layers
// (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC (the supported layer type)
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // 2x2 average pooling, stride 2, 1-pixel padding (the unsupported layer type)
    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> conv1 -> pooling1 -> pooling2 -> conv2 -> pooling3 -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", weightInfo, biasInfo, outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {conv1Layer,
                                   pooling1Layer,
                                   pooling2Layer,
                                   conv2Layer,
                                   pooling3Layer});
}

// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> conv (flagged "unoptimizable" via its name) -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer unoptimizable", weightInfo, biasInfo,
                                                              outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer});
}

// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: a chain of five convolutions where conv2 and conv4
    // are flagged "unoptimizable" via their names
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network (input/output layers excluded)
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({conv5Layer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   conv4Layer,
                                   conv5Layer});
}

// Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Quantized 8-bit activations/weights, 32-bit signed bias
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 },  DataType::Signed32,        0.9f, 0);

    // 1x1 convolution, stride 1, with bias, NHWC
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: two separate input branches joined by an addition layer:
    //   input1 -> conv1 ----------------------> add -> output
    //   input2 -> conv2 (unoptimizable) -> conv3 ->/
    Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
    Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
    addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network; the view's inputs come
    // from two different layers (conv1 and conv2)
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
                                                    conv2Layer}),
                                  CreateOutputsFrom({addLayer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   addLayer});
}

// The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
// NOTE(review): function name has a typo ("Unsupporte"); kept for compatibility with existing callers.
void FullyUnsupporteSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    // Sanity-check the subgraph built by the helper: one input, one output, one (pooling) layer
    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 1);

    BOOST_TEST(Contains(layersInGraph, "pooling layer"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    //  - No substitutions
    //  - Exactly one failed subgraph, corresponding to the whole original one
    //  - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    BOOST_TEST(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    BOOST_TEST(failedSubgraphs.size() == 1);

    // The single failed subgraph must be identical in size and content to the original one
    CheckFailedSubgraph(failedSubgraphs.at(0),
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
}

609// The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
610void FullyUnsupporteSubgraphTestImpl2()
611{
612 Graph graph;
613 LayerNameToLayerMap layersInGraph;
614
615 // Create an unsupported subgraph
616 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
617 BOOST_TEST((subgraphPtr != nullptr));
618
619 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
620 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
621 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
622
623 BOOST_TEST(subgraphInputSlots.size() == 1);
624 BOOST_TEST(subgraphOutputSlots.size() == 1);
625 BOOST_TEST(subgraphLayers.size() == 3);
626
627 BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
628 BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
629 BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));
630
631 // Create a mock backend object
632 auto backendObjPtr = CreateBackendObject(MockBackendId());
633 BOOST_TEST((backendObjPtr != nullptr));
634
635 // Optimize the subgraph
636 OptimizationViews optimizationViews;
637
638 // Check that the optimization is carried out correctly, but no optimization is performed
639 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
640
641 // =======================================================================
642 // The expected results are:
643 // - No substitutions
644 // - Exactly one failed subgraph, corresponding to the whole original one
645 // - No untouched subgraphs
646 // =======================================================================
647
648 // -----------------------
649 // Check the substitutions
650 // -----------------------
651
652 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
653
654 // --------------------------
655 // Check the failed subgraphs
656 // --------------------------
657
658 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
659 BOOST_TEST(failedSubgraphs.size() == 1);
660
661 std::vector<Layer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
662 layersInGraph.at("pooling2 layer"),
663 layersInGraph.at("pooling3 layer") };
664
665 const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
666
667 CheckFailedSubgraph(failedSubgraph,
668 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
669 subgraphInputSlots,
670 subgraphOutputSlots,
671 subgraphLayers);
672
673 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
674
675 BOOST_TEST(failedSubgraphLayers.front() + 0, expectedFailedLayers.at(0));
676 BOOST_TEST(failedSubgraphLayers.front() + 1, expectedFailedLayers.at(1));
677 BOOST_TEST(failedSubgraphLayers.front() + 2, expectedFailedLayers.at(2));
678
679 // -----------------------------
680 // Check the untouched subgraphs
681 // -----------------------------
682
683 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
684}
685
686// A simple case with only one layer (convolution) to optimize, supported by the mock backend
687void FullyOptimizableSubgraphTestImpl1()
688{
689 Graph graph;
690 LayerNameToLayerMap layersInGraph;
691
692 // Create a fully optimizable subgraph
693 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
694 BOOST_TEST((subgraphPtr != nullptr));
695
696 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
697 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
698 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
699
700 BOOST_TEST(subgraphInputSlots.size() == 1);
701 BOOST_TEST(subgraphOutputSlots.size() == 1);
702 BOOST_TEST(subgraphLayers.size() == 1);
703
704 BOOST_TEST(Contains(layersInGraph, "conv layer"));
705
706 // Create a mock backend object
707 auto backendObjPtr = CreateBackendObject(MockBackendId());
708 BOOST_TEST((backendObjPtr != nullptr));
709
710 // Optimize the subgraph
711 OptimizationViews optimizationViews;
712
713 // Check that the optimization is carried out correctly
714 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
715
716 // ===========================================================================================
717 // The expected results are:
718 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
719 // - No failed subgraphs
720 // - No untouched subgraphs
721 // ===========================================================================================
722
723 // -----------------------
724 // Check the substitutions
725 // -----------------------
726
727 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
728 BOOST_TEST(substitutions.size() == 1);
729
730 CheckSubstitution(substitutions.at(0),
731 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
732 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
733 subgraphInputSlots,
734 subgraphOutputSlots,
735 subgraphLayers);
736
737 // --------------------------
738 // Check the failed subgraphs
739 // --------------------------
740
741 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
742
743 // -----------------------------
744 // Check the untouched subgraphs
745 // -----------------------------
746
747 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
748}
749
750// A case with five layers (all convolutions) to optimize, all supported by the mock backend
751void FullyOptimizableSubgraphTestImpl2()
752{
753 Graph graph;
754 LayerNameToLayerMap layersInGraph;
755
756 // Create a fully optimizable subgraph
757 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
758 BOOST_TEST((subgraphPtr != nullptr));
759
760 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
761 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
762 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
763
764 BOOST_TEST(subgraphPtr->GetInputSlots().size() == 1);
765 BOOST_TEST(subgraphPtr->GetOutputSlots().size() == 1);
766 BOOST_TEST(subgraphPtr->GetLayers().size() == 5);
767
768 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
769 BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
770 BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
771 BOOST_TEST(Contains(layersInGraph, "conv4 layer"));
772 BOOST_TEST(Contains(layersInGraph, "conv5 layer"));
773
774 // Create a mock backend object
775 auto backendObjPtr = CreateBackendObject(MockBackendId());
776 BOOST_TEST((backendObjPtr != nullptr));
777
778 // Optimize the subgraph
779 OptimizationViews optimizationViews;
780
781 // Check that the optimization is carried out correctly
782 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
783
784 // ===========================================================================================
785 // The expected results are:
786 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
787 // - No failed subgraphs
788 // - No untouched subgraphs
789 // ===========================================================================================
790
791 // -----------------------
792 // Check the substitutions
793 // -----------------------
794
795 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
796 BOOST_TEST(substitutions.size() == 1);
797
798 std::list<Layer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
799 layersInGraph.at("conv2 layer"),
800 layersInGraph.at("conv3 layer"),
801 layersInGraph.at("conv4 layer"),
802 layersInGraph.at("conv5 layer") };
803
804 const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
805
806 CheckSubstitution(substitution,
807 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
808 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
809 subgraphInputSlots,
810 subgraphOutputSlots,
811 expectedSubstitutableLayers);
812
813 const SubgraphView::Layers& substitutableSubgraphLayers = substitution.m_SubstitutableSubgraph.GetLayers();
814
815 BOOST_TEST(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
816 BOOST_TEST(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
817 BOOST_TEST(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
818 BOOST_TEST(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
819 BOOST_TEST(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
820
821 // --------------------------
822 // Check the failed subgraphs
823 // --------------------------
824
825 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
826
827 // -----------------------------
828 // Check the untouched subgraphs
829 // -----------------------------
830
831 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
832}
833
// The input subgraph contains both supported and unsupported layers
// (only the convolutions are supported by the mock backend; the poolings are not)
void PartiallySupportedSubgraphTestImpl()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially supported subgraph (a mix of convolution and pooling layers)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 5);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ========================================================================
    // The expected results are:
    // - Exactly two substitutions, corresponding to the supported layers
    // - Exactly two failed subgraphs, corresponding to the unsupported layers
    // - No untouched subgraphs
    // ========================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 2);

    // Each supported convolution is substituted on its own (one input, one output, one layer)
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 1, 1, 1 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv2 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    BOOST_TEST(failedSubgraphs.size() == 2);

    // pooling1 and pooling2 are adjacent, so they are expected to be reported as a single
    // failed subgraph of two layers; pooling3 forms a failed subgraph on its own
    std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
                                                                   { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedFailedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedFailedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedFailedLayers
    {
        { layersInGraph.at("pooling1 layer"),
          layersInGraph.at("pooling2 layer") },
        { layersInGraph.at("pooling3 layer") }
    };

    for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
    {
        CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
                            expectedFailedSubgraphSizes.at(failedIndex),
                            expectedFailedInputSlots.at(failedIndex),
                            expectedFailedOutputSlots.at(failedIndex),
                            expectedFailedLayers.at(failedIndex));
    }

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
}
954
955// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
void FullyUnoptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully unoptimizable subgraph (the layer name carries the "unoptimizable" tag)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 1);

    BOOST_TEST(Contains(layersInGraph, "conv layer unoptimizable"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ============================================================================
    // The expected results are:
    // - No substitutions
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the whole input subgraph
    // ============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    BOOST_TEST(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 1);

    // The single untouched subgraph must match the original input subgraph exactly
    CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
                           { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                           subgraphInputSlots,
                           subgraphOutputSlots,
                           subgraphLayers);
}
1017
1018// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
void PartiallyOptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph: the layers tagged "unoptimizable"
    // in their name are not optimized by the mock backend
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 5);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv4 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv5 layer"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ===============================================================================
    // The expected results are:
    // - Exactly three substitutions, corresponding to the optimizable layers
    // - No failed subgraphs
    // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
    // ===============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 3);

    // conv1, conv3 and conv5 are each substituted on their own (one input, one output, one layer)
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 1, 1, 1 },
                                                                          { 1, 1, 1 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv3 layer") },
        { layersInGraph.at("conv5 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 2);

    // conv2 and conv4 (the "unoptimizable" layers) are expected to be left untouched,
    // one subgraph each
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
                                                                      { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") },
        { layersInGraph.at("conv4 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1141
1142// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
1143// this is meant to test input slots coming from different layers
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph (conv2 is tagged "unoptimizable");
    // the add layer gives this subgraph two input slots coming from different layers
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 2);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 4);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
    BOOST_TEST(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    // - Exactly two substitutions, corresponding to the optimizable layers
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    // ==============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 2);

    // First substitution: conv1 on its own; second substitution: conv3 + add
    // (two layers, two inputs) replaced by a single layer
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 2, 1, 2 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 2, 1, 1 } };

    // The second substitutable subgraph takes one input slot from conv3 and one
    // from the add layer (slot 0)
    SubgraphView::InputSlots expectedSubstitutableSubgraph2InputSlots =
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots());
    expectedSubstitutableSubgraph2InputSlots.push_back(
        ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetInputSlot(0)));

    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        expectedSubstitutableSubgraph2InputSlots
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv3 layer"),
          layersInGraph.at("add layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 1);

    // Only the "unoptimizable" conv2 layer is expected to be left untouched
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1263
1264} // Anonymous namespace
1265
BOOST_AUTO_TEST_SUITE(OptimizeSubGraph)

// Each test case forwards to the corresponding implementation function defined in the
// anonymous namespace above.
// NOTE: "FullyUnsupporte..." (sic) matches the spelling of the implementation functions.
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph1) { FullyUnsupporteSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph2) { FullyUnsupporteSubgraphTestImpl2(); }
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph1) { FullyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph2) { FullyOptimizableSubgraphTestImpl2(); }
BOOST_AUTO_TEST_CASE(PartiallySupportedSubgraph) { PartiallySupportedSubgraphTestImpl(); }
BOOST_AUTO_TEST_CASE(FullyUnoptimizableSubgraph) { FullyUnoptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph1) { PartiallyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph2) { PartiallyOptimizableSubgraphTestImpl2(); }

BOOST_AUTO_TEST_SUITE_END()