//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "CommonTestUtils.hpp"
#include "MockBackend.hpp"
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <armnn/BackendRegistry.hpp>

#include <boost/test/unit_test.hpp>

#include <unordered_map>

using namespace armnn;

namespace
{

24// The expected number of layers, input and output slots in a subgraph after a test
25struct ExpectedSubgraphSize
26{
27 size_t m_NumInputSlots = 0;
28 size_t m_NumOutputSlots = 0;
29 size_t m_NumLayers = 0;
30};

// Keep the layers organized by layer name
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;

35// Used to convert input and output slots from reference type (as stored in graphs) to
36// pointer type (as stored in subgraphs)
37template <typename SlotType>
38SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
39{
40 return const_cast<SlotType*>(&input);
41}
42
43// Used to convert input and output slots from reference type (as stored in graphs) to
44// pointer type (as stored in subgraphs), array version
45template <typename SlotType>
46std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
47{
48 std::vector<SlotType*> output;
49 std::transform(input.begin(),
50 input.end(),
51 std::back_inserter(output),
52 [](const SlotType& inputItem)
53 {
54 return ConvertReferenceTypeToPointerType(inputItem);
55 });
56
57 return output;
58}
59
60// Convenience function to add an input layer to a graph
61Layer* AddInputLayer(Graph& graph,
62 const std::string& layerName,
63 const TensorInfo& inputInfo,
64 LayerBindingId inputId = 0)
65{
66 Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
67 BOOST_TEST(inputLayer);
68 inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
69 return inputLayer;
70}
71
72// Convenience function to add an output layer to a graph
73Layer* AddOutputLayer(Graph& graph,
74 const std::string& layerName)
75{
76 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
77 BOOST_TEST(outputLayer);
78 return outputLayer;
79}
80
81// Convenience function to add a convolution layer to a graph
82Convolution2dLayer* AddConvolutionLayer(Graph& graph,
83 LayerNameToLayerMap& layersInGraph,
84 const Convolution2dDescriptor& convolutionDescriptor,
85 const std::string& layerName,
86 const TensorInfo& weightInfo,
87 const TensorInfo& biasInfo,
88 const TensorInfo& outputInfo)
89{
90 Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
91 BOOST_TEST(convLayer);
92 SetWeightAndBias(convLayer, weightInfo, biasInfo);
93 convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
94 layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
95 return convLayer;
96}
97
98// Convenience function to add a pooling layer to a graph
99Pooling2dLayer* AddPoolingLayer(Graph& graph,
100 LayerNameToLayerMap& layersInGraph,
101 const Pooling2dDescriptor& poolingDescriptor,
102 const std::string& layerName,
103 const TensorInfo& outputInfo)
104{
105 Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
106 BOOST_TEST(poolingLayer);
107 poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
108 layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
109 return poolingLayer;
110}
111
112// Convenience function to add an addition layer to a graph
113AdditionLayer* AddAdditionaLayer(Graph& graph,
114 LayerNameToLayerMap& layersInGraph,
115 const std::string& layerName,
116 const TensorInfo& outputInfo)
117{
118 AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
119 BOOST_TEST(additionLayer);
120 additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
121 layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
122 return additionLayer;
123}
124
125// Convenience function to check that the given substitution matches the specified expected values
126void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
127 const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
128 const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
129 const SubgraphView::InputSlots& expectedSubstitutableInputSlots,
130 const SubgraphView::OutputSlots& expectedSubstitutableOutputSlots,
131 const SubgraphView::Layers& expectedSubstitutableLayers)
132{
133 const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
134 const SubgraphView::InputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetInputSlots();
135 const SubgraphView::OutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetOutputSlots();
136 const SubgraphView::Layers& substitutableSubgraphLayers = substitutableSubgraph.GetLayers();
137
138 const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
139 const SubgraphView::InputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetInputSlots();
140 const SubgraphView::OutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetOutputSlots();
141 const SubgraphView::Layers& replacementSubgraphLayers = replacementSubgraph.GetLayers();
142
143 BOOST_TEST(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
144 BOOST_TEST(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
145 BOOST_TEST(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
146
147 BOOST_TEST(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
148 BOOST_TEST(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
149 BOOST_TEST(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
150
151 BOOST_TEST(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
152 BOOST_TEST(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
153 BOOST_TEST(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
154
155 BOOST_TEST(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
156 BOOST_TEST(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
157 BOOST_TEST(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
158
159 BOOST_TEST(std::all_of(replacementSubgraphLayers.begin(),
160 replacementSubgraphLayers.end(),
161 [](const Layer* layer)
162 {
163 return layer->GetType() == LayerType::PreCompiled;
164 }));
165}
166
167// Convenience function to check that the given failed subgraph matches the specified expected values
168void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
169 const ExpectedSubgraphSize& expectedFailedSubgraphSize,
170 const SubgraphView::InputSlots& expectedFailedInputSlots,
171 const SubgraphView::OutputSlots& expectedFailedOutputSlots,
172 const SubgraphView::Layers& expectedFailedLayers)
173{
174 const SubgraphView::InputSlots& failedSubgraphInputSlots = failedSubgraph.GetInputSlots();
175 const SubgraphView::OutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetOutputSlots();
176 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
177
178 BOOST_TEST(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
179 BOOST_TEST(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
180 BOOST_TEST(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
181
182 BOOST_TEST(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
183 BOOST_TEST(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
184 BOOST_TEST(AreEqual(failedSubgraphLayers, expectedFailedLayers));
185}
186
187// Convenience function to check that the given untouched subgraph matches the specified expected values
188void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
189 const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
190 const SubgraphView::InputSlots& expectedUntouchedInputSlots,
191 const SubgraphView::OutputSlots& expectedUntouchedOutputSlots,
192 const SubgraphView::Layers& expectedUntouchedLayers)
193{
194 const SubgraphView::InputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetInputSlots();
195 const SubgraphView::OutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetOutputSlots();
196 const SubgraphView::Layers& untouchedSubgraphLayers = untouchedSubgraph.GetLayers();
197
198 BOOST_TEST(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
199 BOOST_TEST(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
200 BOOST_TEST(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
201
202 BOOST_TEST(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
203 BOOST_TEST(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
204 BOOST_TEST(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
205}
206
207// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
208SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
209{
210 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
211 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
212
213 Pooling2dDescriptor poolingDescriptor;
214 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
215 poolingDescriptor.m_PoolWidth = 2;
216 poolingDescriptor.m_PoolHeight = 2;
217 poolingDescriptor.m_StrideX = 2;
218 poolingDescriptor.m_StrideY = 2;
219 poolingDescriptor.m_PadLeft = 1;
220 poolingDescriptor.m_PadRight = 1;
221 poolingDescriptor.m_PadTop = 1;
222 poolingDescriptor.m_PadBottom = 1;
223 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
224 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
225
226 // Construct the graph
227 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
228 Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
229 "pooling layer", outputInfo);
230 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
231
232 // Connect the network
233 inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
234 poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
235
236 // Create the subgraph view for the whole network
237 return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
238 CreateOutputsFrom({poolingLayer}),
239 {poolingLayer});
240}
241
242// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
243SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
244{
245 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
246 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
247
248 Pooling2dDescriptor poolingDescriptor;
249 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
250 poolingDescriptor.m_PoolWidth = 2;
251 poolingDescriptor.m_PoolHeight = 2;
252 poolingDescriptor.m_StrideX = 2;
253 poolingDescriptor.m_StrideY = 2;
254 poolingDescriptor.m_PadLeft = 1;
255 poolingDescriptor.m_PadRight = 1;
256 poolingDescriptor.m_PadTop = 1;
257 poolingDescriptor.m_PadBottom = 1;
258 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
259 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
260
261 // Construct the graph
262 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
263 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
264 "pooling1 layer", outputInfo);
265 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
266 "pooling2 layer", outputInfo);
267 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
268 "pooling3 layer", outputInfo);
269 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
270
271 // Connect the network
272 inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
273 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
274 pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
275 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
276
277 // Create the subgraph view for the whole network
278 return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
279 CreateOutputsFrom({pooling3Layer}),
280 {pooling1Layer,
281 pooling2Layer,
282 pooling3Layer});
283}
284
285// Creates a simple subgraph with only one convolution layer, supported by the mock backend
286SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
287{
288 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
289 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
290 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
291 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
292
293 Convolution2dDescriptor convolutionDescriptor;
294 convolutionDescriptor.m_StrideX = 1;
295 convolutionDescriptor.m_StrideY = 1;
296 convolutionDescriptor.m_BiasEnabled = true;
297 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
298
299 // Construct the graph
300 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
301 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
302 "conv layer", weightInfo, biasInfo, outputInfo);
303 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
304
305 // Connect the network
306 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
307 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
308
309 // Create the subgraph view for the whole network
310 return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
311 CreateOutputsFrom({convLayer}),
312 {convLayer});
313}
314
315// Creates a subgraph with five convolutions layers, all supported by the mock backend
316SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
317{
318 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
319 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
320 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
321 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
322
323 Convolution2dDescriptor convolutionDescriptor;
324 convolutionDescriptor.m_StrideX = 1;
325 convolutionDescriptor.m_StrideY = 1;
326 convolutionDescriptor.m_BiasEnabled = true;
327 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
328
329 // Construct the graph
330 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
331 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
332 "conv1 layer", weightInfo, biasInfo, outputInfo);
333 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
334 "conv2 layer", weightInfo, biasInfo, outputInfo);
335 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
336 "conv3 layer", weightInfo, biasInfo, outputInfo);
337 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
338 "conv4 layer", weightInfo, biasInfo, outputInfo);
339 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
340 "conv5 layer", weightInfo, biasInfo, outputInfo);
341 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
342
343 // Connect the network
344 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
345 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
346 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
347 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
348 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
349 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
350
351 // Create the subgraph view for the whole network
352 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
353 CreateOutputsFrom({conv5Layer}),
354 {conv1Layer,
355 conv2Layer,
356 conv3Layer,
357 conv4Layer,
358 conv5Layer});
359}
360
361// Creates a subgraph with both supported and unsupported layers
362// (only convolutions are unsupported by the mock backend)
363SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
364{
365 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
366 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
367 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
368 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
369
370 Convolution2dDescriptor convolutionDescriptor;
371 convolutionDescriptor.m_StrideX = 1;
372 convolutionDescriptor.m_StrideY = 1;
373 convolutionDescriptor.m_BiasEnabled = true;
374 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
375
376 Pooling2dDescriptor poolingDescriptor;
377 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
378 poolingDescriptor.m_PoolWidth = 2;
379 poolingDescriptor.m_PoolHeight = 2;
380 poolingDescriptor.m_StrideX = 2;
381 poolingDescriptor.m_StrideY = 2;
382 poolingDescriptor.m_PadLeft = 1;
383 poolingDescriptor.m_PadRight = 1;
384 poolingDescriptor.m_PadTop = 1;
385 poolingDescriptor.m_PadBottom = 1;
386 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
387 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
388
389 // Construct the graph
390 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
391 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
392 "conv1 layer", weightInfo, biasInfo, outputInfo);
393 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
394 "pooling1 layer", outputInfo);
395 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
396 "pooling2 layer", outputInfo);
397 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
398 "conv2 layer", weightInfo, biasInfo, outputInfo);
399 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
400 "pooling3 layer", outputInfo);
401 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
402
403 // Connect the network
404 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
405 conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
406 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
407 pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
408 conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
409 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
410
411 // Create the subgraph view for the whole network
412 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
413 CreateOutputsFrom({pooling3Layer}),
414 {conv1Layer,
415 pooling1Layer,
416 pooling2Layer,
417 conv2Layer,
418 pooling3Layer});
419}
420
421// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
422SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
423{
424 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
425 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
426 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
427 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
428
429 Convolution2dDescriptor convolutionDescriptor;
430 convolutionDescriptor.m_StrideX = 1;
431 convolutionDescriptor.m_StrideY = 1;
432 convolutionDescriptor.m_BiasEnabled = true;
433 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
434
435 // Construct the graph
436 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
437 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
438 "conv layer unoptimizable", weightInfo, biasInfo,
439 outputInfo);
440 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
441
442 // Connect the network
443 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
444 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
445
446 // Create the subgraph view for the whole network
447 return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
448 CreateOutputsFrom({convLayer}),
449 {convLayer});
450}
451
452// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
453SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
454{
455 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
456 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
457 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
458 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
459
460 Convolution2dDescriptor convolutionDescriptor;
461 convolutionDescriptor.m_StrideX = 1;
462 convolutionDescriptor.m_StrideY = 1;
463 convolutionDescriptor.m_BiasEnabled = true;
464 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
465
466 // Construct the graph
467 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
468 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
469 "conv1 layer", weightInfo, biasInfo, outputInfo);
470 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
471 "conv2 layer unoptimizable", weightInfo, biasInfo,
472 outputInfo);
473 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
474 "conv3 layer", weightInfo, biasInfo, outputInfo);
475 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
476 "conv4 layer unoptimizable", weightInfo, biasInfo,
477 outputInfo);
478 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
479 "conv5 layer", weightInfo, biasInfo, outputInfo);
480 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
481
482 // Connect the network
483 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
484 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
485 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
486 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
487 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
488 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
489
490 // Create the subgraph view for the whole network
491 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
492 CreateOutputsFrom({conv5Layer}),
493 {conv1Layer,
494 conv2Layer,
495 conv3Layer,
496 conv4Layer,
497 conv5Layer});
498}
499
500// Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
501// this is meant to test input slots coming from different layers
502SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
503{
504 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
505 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
506 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
507 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
508
509 Convolution2dDescriptor convolutionDescriptor;
510 convolutionDescriptor.m_StrideX = 1;
511 convolutionDescriptor.m_StrideY = 1;
512 convolutionDescriptor.m_BiasEnabled = true;
513 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
514
515 // Construct the graph
516 Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
517 Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
518 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
519 "conv1 layer", weightInfo, biasInfo, outputInfo);
520 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
521 "conv2 layer unoptimizable", weightInfo, biasInfo,
522 outputInfo);
523 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
524 "conv3 layer", weightInfo, biasInfo, outputInfo);
525 AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
526 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
527
528 // Connect the network
529 input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
530 input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
531 conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
532 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
533 conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
534 addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
535
536 // Create the subgraph view for the whole network
537 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
538 conv2Layer}),
539 CreateOutputsFrom({addLayer}),
540 {conv1Layer,
541 conv2Layer,
542 conv3Layer,
543 addLayer});
544}
545
546// The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
547void FullyUnsupporteSubgraphTestImpl1()
548{
549 Graph graph;
550 LayerNameToLayerMap layersInGraph;
551
552 // Create an unsupported subgraph
553 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
554 BOOST_TEST((subgraphPtr != nullptr));
555
556 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
557 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
558 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
559
560 BOOST_TEST(subgraphInputSlots.size() == 1);
561 BOOST_TEST(subgraphOutputSlots.size() == 1);
562 BOOST_TEST(subgraphLayers.size() == 1);
563
564 BOOST_TEST(Contains(layersInGraph, "pooling layer"));
565
566 // Create a mock backend object
567 auto backendObjPtr = CreateBackendObject(MockBackendId());
568 BOOST_TEST((backendObjPtr != nullptr));
569
570 // Optimize the subgraph
571 OptimizationViews optimizationViews;
572
573 // Check that the optimization is carried out correctly, but no optimization is performed
574 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
575
576 // =======================================================================
577 // The expected results are:
578 // - No substitutions
579 // - Exactly one failed subgraph, corresponding to the whole original one
580 // - No untouched subgraphs
581 // =======================================================================
582
583 // -----------------------
584 // Check the substitutions
585 // -----------------------
586
587 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
588
589 // --------------------------
590 // Check the failed subgraphs
591 // --------------------------
592
593 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
594 BOOST_TEST(failedSubgraphs.size() == 1);
595
596 CheckFailedSubgraph(failedSubgraphs.at(0),
597 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
598 subgraphInputSlots,
599 subgraphOutputSlots,
600 subgraphLayers);
601
602 // -----------------------------
603 // Check the untouched subgraphs
604 // -----------------------------
605
606 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
607}
608
609// The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
610void FullyUnsupporteSubgraphTestImpl2()
611{
612 Graph graph;
613 LayerNameToLayerMap layersInGraph;
614
615 // Create an unsupported subgraph
616 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
617 BOOST_TEST((subgraphPtr != nullptr));
618
619 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
620 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
621 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
622
623 BOOST_TEST(subgraphInputSlots.size() == 1);
624 BOOST_TEST(subgraphOutputSlots.size() == 1);
625 BOOST_TEST(subgraphLayers.size() == 3);
626
627 BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
628 BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
629 BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));
630
631 // Create a mock backend object
632 auto backendObjPtr = CreateBackendObject(MockBackendId());
633 BOOST_TEST((backendObjPtr != nullptr));
634
635 // Optimize the subgraph
636 OptimizationViews optimizationViews;
637
638 // Check that the optimization is carried out correctly, but no optimization is performed
639 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
640
641 // =======================================================================
642 // The expected results are:
643 // - No substitutions
644 // - Exactly one failed subgraph, corresponding to the whole original one
645 // - No untouched subgraphs
646 // =======================================================================
647
648 // -----------------------
649 // Check the substitutions
650 // -----------------------
651
652 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
653
654 // --------------------------
655 // Check the failed subgraphs
656 // --------------------------
657
658 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
659 BOOST_TEST(failedSubgraphs.size() == 1);
660
661 std::vector<Layer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
662 layersInGraph.at("pooling2 layer"),
663 layersInGraph.at("pooling3 layer") };
664
665 const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
666
667 CheckFailedSubgraph(failedSubgraph,
668 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
669 subgraphInputSlots,
670 subgraphOutputSlots,
671 subgraphLayers);
672
673 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
674
675 BOOST_TEST(failedSubgraphLayers.front() + 0, expectedFailedLayers.at(0));
676 BOOST_TEST(failedSubgraphLayers.front() + 1, expectedFailedLayers.at(1));
677 BOOST_TEST(failedSubgraphLayers.front() + 2, expectedFailedLayers.at(2));
678
679 // -----------------------------
680 // Check the untouched subgraphs
681 // -----------------------------
682
683 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
684}
685
686// A simple case with only one layer (convolution) to optimize, supported by the mock backend
687void FullyOptimizableSubgraphTestImpl1()
688{
689 Graph graph;
690 LayerNameToLayerMap layersInGraph;
691
692 // Create a fully optimizable subgraph
693 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
694 BOOST_TEST((subgraphPtr != nullptr));
695
696 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
697 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
698 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
699
700 BOOST_TEST(subgraphInputSlots.size() == 1);
701 BOOST_TEST(subgraphOutputSlots.size() == 1);
702 BOOST_TEST(subgraphLayers.size() == 1);
703
704 BOOST_TEST(Contains(layersInGraph, "conv layer"));
705
706 // Create a mock backend object
707 auto backendObjPtr = CreateBackendObject(MockBackendId());
708 BOOST_TEST((backendObjPtr != nullptr));
709
710 // Optimize the subgraph
711 OptimizationViews optimizationViews;
712
713 // Check that the optimization is carried out correctly
714 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
715
716 // ===========================================================================================
717 // The expected results are:
718 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
719 // - No failed subgraphs
720 // - No untouched subgraphs
721 // ===========================================================================================
722
723 // -----------------------
724 // Check the substitutions
725 // -----------------------
726
727 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
728 BOOST_TEST(substitutions.size() == 1);
729
730 CheckSubstitution(substitutions.at(0),
731 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
732 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
733 subgraphInputSlots,
734 subgraphOutputSlots,
735 subgraphLayers);
736
737 // --------------------------
738 // Check the failed subgraphs
739 // --------------------------
740
741 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
742
743 // -----------------------------
744 // Check the untouched subgraphs
745 // -----------------------------
746
747 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
748}
749
750// A case with five layers (all convolutions) to optimize, all supported by the mock backend
751void FullyOptimizableSubgraphTestImpl2()
752{
753 Graph graph;
754 LayerNameToLayerMap layersInGraph;
755
756 // Create a fully optimizable subgraph
757 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
758 BOOST_TEST((subgraphPtr != nullptr));
759
760 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
761 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
762 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
763
764 BOOST_TEST(subgraphPtr->GetInputSlots().size() == 1);
765 BOOST_TEST(subgraphPtr->GetOutputSlots().size() == 1);
766 BOOST_TEST(subgraphPtr->GetLayers().size() == 5);
767
768 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
769 BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
770 BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
771 BOOST_TEST(Contains(layersInGraph, "conv4 layer"));
772 BOOST_TEST(Contains(layersInGraph, "conv5 layer"));
773
774 // Create a mock backend object
775 auto backendObjPtr = CreateBackendObject(MockBackendId());
776 BOOST_TEST((backendObjPtr != nullptr));
777
778 // Optimize the subgraph
779 OptimizationViews optimizationViews;
780
781 // Check that the optimization is carried out correctly
782 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
783
784 // ===========================================================================================
785 // The expected results are:
786 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
787 // - No failed subgraphs
788 // - No untouched subgraphs
789 // ===========================================================================================
790
791 // -----------------------
792 // Check the substitutions
793 // -----------------------
794
795 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
796 BOOST_TEST(substitutions.size() == 1);
797
798 std::list<Layer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
799 layersInGraph.at("conv2 layer"),
800 layersInGraph.at("conv3 layer"),
801 layersInGraph.at("conv4 layer"),
802 layersInGraph.at("conv5 layer") };
803
804 const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
805
806 CheckSubstitution(substitution,
807 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
808 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
809 subgraphInputSlots,
810 subgraphOutputSlots,
811 expectedSubstitutableLayers);
812
813 const SubgraphView::Layers& substitutableSubgraphLayers = substitution.m_SubstitutableSubgraph.GetLayers();
814
815 BOOST_TEST(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
816 BOOST_TEST(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
817 BOOST_TEST(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
818 BOOST_TEST(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
819 BOOST_TEST(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
820
821 // --------------------------
822 // Check the failed subgraphs
823 // --------------------------
824
825 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
826
827 // -----------------------------
828 // Check the untouched subgraphs
829 // -----------------------------
830
831 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
832}
833
// The input subgraph contains both supported and unsupported layers
// (the mock backend supports the convolution layers but not the pooling layers)
836void PartiallySupportedSubgraphTestImpl()
837{
838 Graph graph;
839 LayerNameToLayerMap layersInGraph;
840
841 // Create a fully optimizable subgraph
842 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
843 BOOST_TEST((subgraphPtr != nullptr));
844
845 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
846 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
847 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
848
849 BOOST_TEST(subgraphInputSlots.size() == 1);
850 BOOST_TEST(subgraphOutputSlots.size() == 1);
851 BOOST_TEST(subgraphLayers.size() == 5);
852
853 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
854 BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
855 BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
856 BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
857 BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));
858
859 // Create a mock backend object
860 auto backendObjPtr = CreateBackendObject(MockBackendId());
861 BOOST_TEST((backendObjPtr != nullptr));
862
863 // Optimize the subgraph
864 OptimizationViews optimizationViews;
865
866 // Check that the optimization is carried out correctly
867 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
868
869 // ========================================================================
870 // The expected results are:
871 // - Exactly two substitution, corresponding to the supported layers
872 // - Exactly two failed subgraphs, corresponding to the unsupported layers
873 // - No untouched subgraphs
874 // ========================================================================
875
876 // -----------------------
877 // Check the substitutions
878 // -----------------------
879
Rob Hughes30db8ad2019-11-08 15:50:10 +0000880 OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100881 BOOST_TEST(substitutions.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +0000882 // Sort into a consistent order
883 std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
884 return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
885 s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0;
886 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100887
888 std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
889 { 1, 1, 1 } };
890 std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
891 { 1, 1, 1 } };
892 std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
893 {
894 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
895 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots())
896 };
897 std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
898 {
899 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
900 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots())
901 };
902 std::vector<SubgraphView::Layers> expectedSubstitutableLayers
903 {
904 { layersInGraph.at("conv1 layer") },
905 { layersInGraph.at("conv2 layer") }
906 };
907
908 for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
909 {
910 CheckSubstitution(substitutions.at(substitutionIndex),
911 expectedSubstitutableSubgraphSizes.at(substitutionIndex),
912 expectedReplacementSubgraphSizes.at(substitutionIndex),
913 expectedSubstitutableInputSlots.at(substitutionIndex),
914 expectedSubstitutableOutputSlots.at(substitutionIndex),
915 expectedSubstitutableLayers.at(substitutionIndex));
916 }
917
918 // --------------------------
919 // Check the failed subgraphs
920 // --------------------------
921
Rob Hughes30db8ad2019-11-08 15:50:10 +0000922 OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100923 BOOST_TEST(failedSubgraphs.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +0000924 // Sort into a consistent order
925 std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
926 return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
927 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100928
929 std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
930 { 1, 1, 1 } };
931 std::vector<SubgraphView::InputSlots> expectedFailedInputSlots
932 {
933 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots()),
934 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots())
935 };
936 std::vector<SubgraphView::OutputSlots> expectedFailedOutputSlots
937 {
938 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots()),
939 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots())
940 };
941 std::vector<SubgraphView::Layers> expectedFailedLayers
942 {
943 { layersInGraph.at("pooling1 layer"),
944 layersInGraph.at("pooling2 layer") },
945 { layersInGraph.at("pooling3 layer") }
946 };
947
948 for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
949 {
950 CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
951 expectedFailedSubgraphSizes.at(failedIndex),
952 expectedFailedInputSlots.at(failedIndex),
953 expectedFailedOutputSlots.at(failedIndex),
954 expectedFailedLayers.at(failedIndex));
955 }
956
957 // -----------------------------
958 // Check the untouched subgraphs
959 // -----------------------------
960
961 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
962}
963
964// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
void FullyUnoptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully *unoptimizable* subgraph: the single layer carries the
    // "unoptimizable" tag in its name, which the mock backend declines to optimize
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    // Sanity-check the structure of the input subgraph
    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 1);

    BOOST_TEST(Contains(layersInGraph, "conv layer unoptimizable"));

    // Create a mock backend object
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly (it must not throw)
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ============================================================================
    // The expected results are:
    // - No substitutions
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the whole input subgraph
    // ============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    BOOST_TEST(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 1);

    // The untouched subgraph must be identical to the whole input subgraph
    CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
                           { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                           subgraphInputSlots,
                           subgraphOutputSlots,
                           subgraphLayers);
}
1026
1027// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1028void PartiallyOptimizableSubgraphTestImpl1()
1029{
1030 Graph graph;
1031 LayerNameToLayerMap layersInGraph;
1032
1033 // Create a fully optimizable subgraph
1034 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
1035 BOOST_TEST((subgraphPtr != nullptr));
1036
1037 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
1038 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
1039 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
1040
1041 BOOST_TEST(subgraphInputSlots.size() == 1);
1042 BOOST_TEST(subgraphOutputSlots.size() == 1);
1043 BOOST_TEST(subgraphLayers.size() == 5);
1044
1045 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
1046 BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
1047 BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
1048 BOOST_TEST(Contains(layersInGraph, "conv4 layer unoptimizable"));
1049 BOOST_TEST(Contains(layersInGraph, "conv5 layer"));
1050
1051 // Create a mock backend object
1052 auto backendObjPtr = CreateBackendObject(MockBackendId());
1053 BOOST_TEST((backendObjPtr != nullptr));
1054
1055 // Optimize the subgraph
1056 OptimizationViews optimizationViews;
1057
1058 // Check that the optimization is carried out correctly
1059 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1060
1061 // ===============================================================================
1062 // The expected results are:
1063 // - Exactly three substitutions, corresponding to the optimizable layers
1064 // - No failed subgraphs
1065 // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1066 // ===============================================================================
1067
1068 // -----------------------
1069 // Check the substitutions
1070 // -----------------------
1071
Rob Hughes30db8ad2019-11-08 15:50:10 +00001072 OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001073 BOOST_TEST(substitutions.size() == 3);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001074 // Sort into a consistent order
1075 std::sort(substitutions.begin(), substitutions.end(),
1076 [](auto s1, auto s2) { return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
1077 s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0; });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001078
1079 std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
1080 { 1, 1, 1 },
1081 { 1, 1, 1 } };
1082 std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1083 { 1, 1, 1 },
1084 { 1, 1, 1 } };
1085 std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
1086 {
1087 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
1088 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()),
1089 ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots())
1090 };
1091 std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
1092 {
1093 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
1094 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots()),
1095 ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots())
1096 };
1097 std::vector<SubgraphView::Layers> expectedSubstitutableLayers
1098 {
1099 { layersInGraph.at("conv1 layer") },
1100 { layersInGraph.at("conv3 layer") },
1101 { layersInGraph.at("conv5 layer") }
1102 };
1103
1104 for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1105 {
1106 CheckSubstitution(substitutions.at(substitutionIndex),
1107 expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1108 expectedReplacementSubgraphSizes.at(substitutionIndex),
1109 expectedSubstitutableInputSlots.at(substitutionIndex),
1110 expectedSubstitutableOutputSlots.at(substitutionIndex),
1111 expectedSubstitutableLayers.at(substitutionIndex));
1112 }
1113
1114 // --------------------------
1115 // Check the failed subgraphs
1116 // --------------------------
1117
1118 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
1119
1120 // -----------------------------
1121 // Check the untouched subgraphs
1122 // -----------------------------
1123
Rob Hughes30db8ad2019-11-08 15:50:10 +00001124 OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001125 BOOST_TEST(untouchedSubgraphs.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001126 // Sort into a consistent order
1127 std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
1128 return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
1129 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001130
1131 std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
1132 { 1, 1, 1 } };
1133 std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
1134 {
1135 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()),
1136 ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots())
1137 };
1138 std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
1139 {
1140 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()),
1141 ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots())
1142 };
1143 std::vector<SubgraphView::Layers> expectedUntouchedLayers
1144 {
1145 { layersInGraph.at("conv2 layer unoptimizable") },
1146 { layersInGraph.at("conv4 layer unoptimizable") }
1147 };
1148
1149 for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1150 {
1151 CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1152 expectedUntouchedSubgraphSizes.at(untouchedIndex),
1153 expectedUntouchedInputSlots.at(untouchedIndex),
1154 expectedUntouchedOutputSlots.at(untouchedIndex),
1155 expectedUntouchedLayers.at(untouchedIndex));
1156 }
1157}
1158
1159// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
1160// this is meant to test input slots coming from different layers
1161void PartiallyOptimizableSubgraphTestImpl2()
1162{
1163 Graph graph;
1164 LayerNameToLayerMap layersInGraph;
1165
Rob Hughes30db8ad2019-11-08 15:50:10 +00001166 // Create a partially optimizable subgraph
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001167 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
1168 BOOST_TEST((subgraphPtr != nullptr));
1169
1170 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
1171 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
1172 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
1173
1174 BOOST_TEST(subgraphInputSlots.size() == 2);
1175 BOOST_TEST(subgraphOutputSlots.size() == 1);
1176 BOOST_TEST(subgraphLayers.size() == 4);
1177
1178 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
1179 BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
1180 BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
1181 BOOST_TEST(Contains(layersInGraph, "add layer"));
1182
1183 // Create a mock backend object
1184 auto backendObjPtr = CreateBackendObject(MockBackendId());
1185 BOOST_TEST((backendObjPtr != nullptr));
1186
1187 // Optimize the subgraph
1188 OptimizationViews optimizationViews;
1189
1190 // Check that the optimization is carried out correctly
1191 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1192
1193 // ==============================================================================
1194 // The expected results are:
1195 // - Exactly one substitution, corresponding to the optimizable layers
1196 // - No failed subgraphs
1197 // - Exactly two untouched subgraphs, corresponding to the non-optimizable layer
1198 // ==============================================================================
1199
1200 // -----------------------
1201 // Check the substitutions
1202 // -----------------------
1203
1204 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Rob Hughes30db8ad2019-11-08 15:50:10 +00001205 BOOST_TEST(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001206
Rob Hughes30db8ad2019-11-08 15:50:10 +00001207 ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 3 };
1208 ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001209
Rob Hughes30db8ad2019-11-08 15:50:10 +00001210 SubgraphView::InputSlots expectedSubstitutableInputSlots = {
1211 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0]),
1212 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001213 };
Rob Hughes30db8ad2019-11-08 15:50:10 +00001214 SubgraphView::OutputSlots expectedSubstitutableOutputSlots =
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001215 {
Rob Hughes30db8ad2019-11-08 15:50:10 +00001216 ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()[0])
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001217 };
Rob Hughes30db8ad2019-11-08 15:50:10 +00001218 SubgraphView::Layers expectedSubstitutableLayers
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001219 {
Rob Hughes30db8ad2019-11-08 15:50:10 +00001220 layersInGraph.at("conv1 layer"),
1221 layersInGraph.at("conv3 layer"),
1222 layersInGraph.at("add layer")
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001223 };
1224
Rob Hughes30db8ad2019-11-08 15:50:10 +00001225 CheckSubstitution(substitutions[0],
1226 expectedSubstitutableSubgraphSizes,
1227 expectedReplacementSubgraphSizes,
1228 expectedSubstitutableInputSlots,
1229 expectedSubstitutableOutputSlots,
1230 expectedSubstitutableLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001231
1232 // --------------------------
1233 // Check the failed subgraphs
1234 // --------------------------
1235
1236 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
1237
1238 // -----------------------------
1239 // Check the untouched subgraphs
1240 // -----------------------------
1241
1242 const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1243 BOOST_TEST(untouchedSubgraphs.size() == 1);
1244
1245 std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
1246 std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
1247 {
1248 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())
1249 };
1250 std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
1251 {
1252 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())
1253 };
1254 std::vector<SubgraphView::Layers> expectedUntouchedLayers
1255 {
1256 { layersInGraph.at("conv2 layer unoptimizable") }
1257 };
1258
1259 for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1260 {
1261 CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1262 expectedUntouchedSubgraphSizes.at(untouchedIndex),
1263 expectedUntouchedInputSlots.at(untouchedIndex),
1264 expectedUntouchedOutputSlots.at(untouchedIndex),
1265 expectedUntouchedLayers.at(untouchedIndex));
1266 }
1267}
1268
1269} // Anonymous namespace
1270
// Register each test implementation above with the Boost.Test framework.
// (The "FullyUnsupporte..." spelling matches the helper functions as declared earlier in this file.)
BOOST_AUTO_TEST_SUITE(OptimizeSubGraph)

// Subgraphs wholly rejected by the mock backend
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph1) { FullyUnsupporteSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph2) { FullyUnsupporteSubgraphTestImpl2(); }
// Subgraphs wholly optimized by the mock backend
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph1) { FullyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph2) { FullyOptimizableSubgraphTestImpl2(); }
// Mixed support / mixed optimizability cases
BOOST_AUTO_TEST_CASE(PartiallySupportedSubgraph) { PartiallySupportedSubgraphTestImpl(); }
BOOST_AUTO_TEST_CASE(FullyUnoptimizableSubgraph) { FullyUnoptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph1) { PartiallyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph2) { PartiallyOptimizableSubgraphTestImpl2(); }

BOOST_AUTO_TEST_SUITE_END()