blob: f7ebf1a14e1ae225f9e3e7098ae38fa3d6da97c1 [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

6#include "CommonTestUtils.hpp"
7#include "MockBackend.hpp"
8#include "MockBackendId.hpp"
9
10#include <Graph.hpp>
11#include <Network.hpp>
12
Matteo Martincighc601aa62019-10-29 15:03:22 +000013#include <armnn/BackendRegistry.hpp>
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010014
15#include <boost/test/unit_test.hpp>
16
17#include <unordered_map>
18
19using namespace armnn;
20
21namespace
22{
23
// Describes the size a subgraph is expected to have at a given point in a test:
// the number of input slots, output slots and layers it contains
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  = 0;
    size_t m_NumOutputSlots = 0;
    size_t m_NumLayers      = 0;
};
31
32// Keep the layers organized by layer name
33using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
34
// Converts a slot from the reference form stored in graphs to the pointer form
// stored in subgraphs (the const_cast is needed because graphs hand out const
// references while subgraphs keep mutable pointers)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    return const_cast<SlotType*>(&input);
}
42
// Converts a collection of slots from the reference form stored in graphs to the
// pointer form stored in subgraphs (array overload of the function above)
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    output.reserve(input.size());
    for (const SlotType& item : input)
    {
        output.push_back(ConvertReferenceTypeToPointerType(item));
    }
    return output;
}
59
60// Convenience function to add an input layer to a graph
61Layer* AddInputLayer(Graph& graph,
62 const std::string& layerName,
63 const TensorInfo& inputInfo,
64 LayerBindingId inputId = 0)
65{
66 Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
67 BOOST_TEST(inputLayer);
68 inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
69 return inputLayer;
70}
71
72// Convenience function to add an output layer to a graph
73Layer* AddOutputLayer(Graph& graph,
74 const std::string& layerName)
75{
76 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
77 BOOST_TEST(outputLayer);
78 return outputLayer;
79}
80
81// Convenience function to add a convolution layer to a graph
82Convolution2dLayer* AddConvolutionLayer(Graph& graph,
83 LayerNameToLayerMap& layersInGraph,
84 const Convolution2dDescriptor& convolutionDescriptor,
85 const std::string& layerName,
86 const TensorInfo& weightInfo,
87 const TensorInfo& biasInfo,
88 const TensorInfo& outputInfo)
89{
90 Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
91 BOOST_TEST(convLayer);
92 SetWeightAndBias(convLayer, weightInfo, biasInfo);
93 convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
94 layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
95 return convLayer;
96}
97
98// Convenience function to add a pooling layer to a graph
99Pooling2dLayer* AddPoolingLayer(Graph& graph,
100 LayerNameToLayerMap& layersInGraph,
101 const Pooling2dDescriptor& poolingDescriptor,
102 const std::string& layerName,
103 const TensorInfo& outputInfo)
104{
105 Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
106 BOOST_TEST(poolingLayer);
107 poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
108 layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
109 return poolingLayer;
110}
111
112// Convenience function to add an addition layer to a graph
113AdditionLayer* AddAdditionaLayer(Graph& graph,
114 LayerNameToLayerMap& layersInGraph,
115 const std::string& layerName,
116 const TensorInfo& outputInfo)
117{
118 AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
119 BOOST_TEST(additionLayer);
120 additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
121 layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
122 return additionLayer;
123}
124
125// Convenience function to check that the given substitution matches the specified expected values
126void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
127 const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
128 const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
129 const SubgraphView::InputSlots& expectedSubstitutableInputSlots,
130 const SubgraphView::OutputSlots& expectedSubstitutableOutputSlots,
131 const SubgraphView::Layers& expectedSubstitutableLayers)
132{
133 const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
134 const SubgraphView::InputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetInputSlots();
135 const SubgraphView::OutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetOutputSlots();
136 const SubgraphView::Layers& substitutableSubgraphLayers = substitutableSubgraph.GetLayers();
137
138 const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
139 const SubgraphView::InputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetInputSlots();
140 const SubgraphView::OutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetOutputSlots();
141 const SubgraphView::Layers& replacementSubgraphLayers = replacementSubgraph.GetLayers();
142
143 BOOST_TEST(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
144 BOOST_TEST(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
145 BOOST_TEST(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
146
147 BOOST_TEST(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
148 BOOST_TEST(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
149 BOOST_TEST(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
150
151 BOOST_TEST(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
152 BOOST_TEST(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
153 BOOST_TEST(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
154
155 BOOST_TEST(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
156 BOOST_TEST(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
157 BOOST_TEST(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
158
159 BOOST_TEST(std::all_of(replacementSubgraphLayers.begin(),
160 replacementSubgraphLayers.end(),
161 [](const Layer* layer)
162 {
163 return layer->GetType() == LayerType::PreCompiled;
164 }));
165}
166
167// Convenience function to check that the given failed subgraph matches the specified expected values
168void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
169 const ExpectedSubgraphSize& expectedFailedSubgraphSize,
170 const SubgraphView::InputSlots& expectedFailedInputSlots,
171 const SubgraphView::OutputSlots& expectedFailedOutputSlots,
172 const SubgraphView::Layers& expectedFailedLayers)
173{
174 const SubgraphView::InputSlots& failedSubgraphInputSlots = failedSubgraph.GetInputSlots();
175 const SubgraphView::OutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetOutputSlots();
176 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
177
178 BOOST_TEST(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
179 BOOST_TEST(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
180 BOOST_TEST(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
181
182 BOOST_TEST(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
183 BOOST_TEST(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
184 BOOST_TEST(AreEqual(failedSubgraphLayers, expectedFailedLayers));
185}
186
187// Convenience function to check that the given untouched subgraph matches the specified expected values
188void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
189 const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
190 const SubgraphView::InputSlots& expectedUntouchedInputSlots,
191 const SubgraphView::OutputSlots& expectedUntouchedOutputSlots,
192 const SubgraphView::Layers& expectedUntouchedLayers)
193{
194 const SubgraphView::InputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetInputSlots();
195 const SubgraphView::OutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetOutputSlots();
196 const SubgraphView::Layers& untouchedSubgraphLayers = untouchedSubgraph.GetLayers();
197
198 BOOST_TEST(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
199 BOOST_TEST(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
200 BOOST_TEST(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
201
202 BOOST_TEST(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
203 BOOST_TEST(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
204 BOOST_TEST(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
205}
206
207// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
208SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
209{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000210 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
211 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100212
213 Pooling2dDescriptor poolingDescriptor;
214 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
215 poolingDescriptor.m_PoolWidth = 2;
216 poolingDescriptor.m_PoolHeight = 2;
217 poolingDescriptor.m_StrideX = 2;
218 poolingDescriptor.m_StrideY = 2;
219 poolingDescriptor.m_PadLeft = 1;
220 poolingDescriptor.m_PadRight = 1;
221 poolingDescriptor.m_PadTop = 1;
222 poolingDescriptor.m_PadBottom = 1;
223 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
224 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
225
226 // Construct the graph
227 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
228 Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
229 "pooling layer", outputInfo);
230 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
231
232 // Connect the network
233 inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
234 poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
235
236 // Create the subgraph view for the whole network
237 return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
238 CreateOutputsFrom({poolingLayer}),
239 {poolingLayer});
240}
241
242// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
243SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
244{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000245 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
246 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100247
248 Pooling2dDescriptor poolingDescriptor;
249 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
250 poolingDescriptor.m_PoolWidth = 2;
251 poolingDescriptor.m_PoolHeight = 2;
252 poolingDescriptor.m_StrideX = 2;
253 poolingDescriptor.m_StrideY = 2;
254 poolingDescriptor.m_PadLeft = 1;
255 poolingDescriptor.m_PadRight = 1;
256 poolingDescriptor.m_PadTop = 1;
257 poolingDescriptor.m_PadBottom = 1;
258 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
259 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
260
261 // Construct the graph
262 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
263 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
264 "pooling1 layer", outputInfo);
265 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
266 "pooling2 layer", outputInfo);
267 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
268 "pooling3 layer", outputInfo);
269 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
270
271 // Connect the network
272 inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
273 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
274 pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
275 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
276
277 // Create the subgraph view for the whole network
278 return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
279 CreateOutputsFrom({pooling3Layer}),
280 {pooling1Layer,
281 pooling2Layer,
282 pooling3Layer});
283}
284
285// Creates a simple subgraph with only one convolution layer, supported by the mock backend
286SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
287{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000288 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
289 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
290 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100291 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
292
293 Convolution2dDescriptor convolutionDescriptor;
294 convolutionDescriptor.m_StrideX = 1;
295 convolutionDescriptor.m_StrideY = 1;
296 convolutionDescriptor.m_BiasEnabled = true;
297 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
298
299 // Construct the graph
300 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
301 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
302 "conv layer", weightInfo, biasInfo, outputInfo);
303 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
304
305 // Connect the network
306 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
307 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
308
309 // Create the subgraph view for the whole network
310 return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
311 CreateOutputsFrom({convLayer}),
312 {convLayer});
313}
314
315// Creates a subgraph with five convolutions layers, all supported by the mock backend
316SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
317{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000318 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
319 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
320 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100321 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
322
323 Convolution2dDescriptor convolutionDescriptor;
324 convolutionDescriptor.m_StrideX = 1;
325 convolutionDescriptor.m_StrideY = 1;
326 convolutionDescriptor.m_BiasEnabled = true;
327 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
328
329 // Construct the graph
330 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
331 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
332 "conv1 layer", weightInfo, biasInfo, outputInfo);
333 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
334 "conv2 layer", weightInfo, biasInfo, outputInfo);
335 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
336 "conv3 layer", weightInfo, biasInfo, outputInfo);
337 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
338 "conv4 layer", weightInfo, biasInfo, outputInfo);
339 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
340 "conv5 layer", weightInfo, biasInfo, outputInfo);
341 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
342
343 // Connect the network
344 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
345 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
346 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
347 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
348 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
349 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
350
351 // Create the subgraph view for the whole network
352 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
353 CreateOutputsFrom({conv5Layer}),
354 {conv1Layer,
355 conv2Layer,
356 conv3Layer,
357 conv4Layer,
358 conv5Layer});
359}
360
361// Creates a subgraph with both supported and unsupported layers
362// (only convolutions are unsupported by the mock backend)
363SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
364{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000365 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
366 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
367 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100368 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
369
370 Convolution2dDescriptor convolutionDescriptor;
371 convolutionDescriptor.m_StrideX = 1;
372 convolutionDescriptor.m_StrideY = 1;
373 convolutionDescriptor.m_BiasEnabled = true;
374 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
375
376 Pooling2dDescriptor poolingDescriptor;
377 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
378 poolingDescriptor.m_PoolWidth = 2;
379 poolingDescriptor.m_PoolHeight = 2;
380 poolingDescriptor.m_StrideX = 2;
381 poolingDescriptor.m_StrideY = 2;
382 poolingDescriptor.m_PadLeft = 1;
383 poolingDescriptor.m_PadRight = 1;
384 poolingDescriptor.m_PadTop = 1;
385 poolingDescriptor.m_PadBottom = 1;
386 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
387 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
388
389 // Construct the graph
390 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
391 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
392 "conv1 layer", weightInfo, biasInfo, outputInfo);
393 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
394 "pooling1 layer", outputInfo);
395 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
396 "pooling2 layer", outputInfo);
397 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
398 "conv2 layer", weightInfo, biasInfo, outputInfo);
399 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
400 "pooling3 layer", outputInfo);
401 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
402
403 // Connect the network
404 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
405 conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
406 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
407 pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
408 conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
409 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
410
411 // Create the subgraph view for the whole network
412 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
413 CreateOutputsFrom({pooling3Layer}),
414 {conv1Layer,
415 pooling1Layer,
416 pooling2Layer,
417 conv2Layer,
418 pooling3Layer});
419}
420
421// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
422SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
423{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000424 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
425 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
426 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100427 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
428
429 Convolution2dDescriptor convolutionDescriptor;
430 convolutionDescriptor.m_StrideX = 1;
431 convolutionDescriptor.m_StrideY = 1;
432 convolutionDescriptor.m_BiasEnabled = true;
433 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
434
435 // Construct the graph
436 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
437 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
438 "conv layer unoptimizable", weightInfo, biasInfo,
439 outputInfo);
440 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
441
442 // Connect the network
443 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
444 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
445
446 // Create the subgraph view for the whole network
447 return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
448 CreateOutputsFrom({convLayer}),
449 {convLayer});
450}
451
452// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
453SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
454{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000455 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
456 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
457 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100458 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
459
460 Convolution2dDescriptor convolutionDescriptor;
461 convolutionDescriptor.m_StrideX = 1;
462 convolutionDescriptor.m_StrideY = 1;
463 convolutionDescriptor.m_BiasEnabled = true;
464 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
465
466 // Construct the graph
467 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
468 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
469 "conv1 layer", weightInfo, biasInfo, outputInfo);
470 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
471 "conv2 layer unoptimizable", weightInfo, biasInfo,
472 outputInfo);
473 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
474 "conv3 layer", weightInfo, biasInfo, outputInfo);
475 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
476 "conv4 layer unoptimizable", weightInfo, biasInfo,
477 outputInfo);
478 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
479 "conv5 layer", weightInfo, biasInfo, outputInfo);
480 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
481
482 // Connect the network
483 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
484 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
485 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
486 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
487 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
488 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
489
490 // Create the subgraph view for the whole network
491 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
492 CreateOutputsFrom({conv5Layer}),
493 {conv1Layer,
494 conv2Layer,
495 conv3Layer,
496 conv4Layer,
497 conv5Layer});
498}
499
500// Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
501// this is meant to test input slots coming from different layers
502SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
503{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000504 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
505 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
506 const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100507 const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
508
509 Convolution2dDescriptor convolutionDescriptor;
510 convolutionDescriptor.m_StrideX = 1;
511 convolutionDescriptor.m_StrideY = 1;
512 convolutionDescriptor.m_BiasEnabled = true;
513 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
514
515 // Construct the graph
516 Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
517 Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
518 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
519 "conv1 layer", weightInfo, biasInfo, outputInfo);
520 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
521 "conv2 layer unoptimizable", weightInfo, biasInfo,
522 outputInfo);
523 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
524 "conv3 layer", weightInfo, biasInfo, outputInfo);
525 AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
526 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
527
528 // Connect the network
529 input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
530 input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
531 conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
532 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
533 conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
534 addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
535
536 // Create the subgraph view for the whole network
537 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
538 conv2Layer}),
539 CreateOutputsFrom({addLayer}),
540 {conv1Layer,
541 conv2Layer,
542 conv3Layer,
543 addLayer});
544}
545
546// The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
547void FullyUnsupporteSubgraphTestImpl1()
548{
549 Graph graph;
550 LayerNameToLayerMap layersInGraph;
551
552 // Create an unsupported subgraph
553 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
554 BOOST_TEST((subgraphPtr != nullptr));
555
556 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
557 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
558 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
559
560 BOOST_TEST(subgraphInputSlots.size() == 1);
561 BOOST_TEST(subgraphOutputSlots.size() == 1);
562 BOOST_TEST(subgraphLayers.size() == 1);
563
564 BOOST_TEST(Contains(layersInGraph, "pooling layer"));
565
566 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000567 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100568 auto backendObjPtr = CreateBackendObject(MockBackendId());
569 BOOST_TEST((backendObjPtr != nullptr));
570
571 // Optimize the subgraph
572 OptimizationViews optimizationViews;
573
574 // Check that the optimization is carried out correctly, but no optimization is performed
575 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
576
577 // =======================================================================
578 // The expected results are:
579 // - No substitutions
580 // - Exactly one failed subgraph, corresponding to the whole original one
581 // - No untouched subgraphs
582 // =======================================================================
583
584 // -----------------------
585 // Check the substitutions
586 // -----------------------
587
588 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
589
590 // --------------------------
591 // Check the failed subgraphs
592 // --------------------------
593
594 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
595 BOOST_TEST(failedSubgraphs.size() == 1);
596
597 CheckFailedSubgraph(failedSubgraphs.at(0),
598 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
599 subgraphInputSlots,
600 subgraphOutputSlots,
601 subgraphLayers);
602
603 // -----------------------------
604 // Check the untouched subgraphs
605 // -----------------------------
606
607 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
608}
609
610// The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
611void FullyUnsupporteSubgraphTestImpl2()
612{
613 Graph graph;
614 LayerNameToLayerMap layersInGraph;
615
616 // Create an unsupported subgraph
617 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
618 BOOST_TEST((subgraphPtr != nullptr));
619
620 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
621 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
622 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
623
624 BOOST_TEST(subgraphInputSlots.size() == 1);
625 BOOST_TEST(subgraphOutputSlots.size() == 1);
626 BOOST_TEST(subgraphLayers.size() == 3);
627
628 BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
629 BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
630 BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));
631
632 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000633 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100634 auto backendObjPtr = CreateBackendObject(MockBackendId());
635 BOOST_TEST((backendObjPtr != nullptr));
636
637 // Optimize the subgraph
638 OptimizationViews optimizationViews;
639
640 // Check that the optimization is carried out correctly, but no optimization is performed
641 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
642
643 // =======================================================================
644 // The expected results are:
645 // - No substitutions
646 // - Exactly one failed subgraph, corresponding to the whole original one
647 // - No untouched subgraphs
648 // =======================================================================
649
650 // -----------------------
651 // Check the substitutions
652 // -----------------------
653
654 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
655
656 // --------------------------
657 // Check the failed subgraphs
658 // --------------------------
659
660 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
661 BOOST_TEST(failedSubgraphs.size() == 1);
662
663 std::vector<Layer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
664 layersInGraph.at("pooling2 layer"),
665 layersInGraph.at("pooling3 layer") };
666
667 const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
668
669 CheckFailedSubgraph(failedSubgraph,
670 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
671 subgraphInputSlots,
672 subgraphOutputSlots,
673 subgraphLayers);
674
675 const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();
676
677 BOOST_TEST(failedSubgraphLayers.front() + 0, expectedFailedLayers.at(0));
678 BOOST_TEST(failedSubgraphLayers.front() + 1, expectedFailedLayers.at(1));
679 BOOST_TEST(failedSubgraphLayers.front() + 2, expectedFailedLayers.at(2));
680
681 // -----------------------------
682 // Check the untouched subgraphs
683 // -----------------------------
684
685 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
686}
687
688// A simple case with only one layer (convolution) to optimize, supported by the mock backend
689void FullyOptimizableSubgraphTestImpl1()
690{
691 Graph graph;
692 LayerNameToLayerMap layersInGraph;
693
694 // Create a fully optimizable subgraph
695 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
696 BOOST_TEST((subgraphPtr != nullptr));
697
698 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
699 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
700 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
701
702 BOOST_TEST(subgraphInputSlots.size() == 1);
703 BOOST_TEST(subgraphOutputSlots.size() == 1);
704 BOOST_TEST(subgraphLayers.size() == 1);
705
706 BOOST_TEST(Contains(layersInGraph, "conv layer"));
707
708 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000709 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100710 auto backendObjPtr = CreateBackendObject(MockBackendId());
711 BOOST_TEST((backendObjPtr != nullptr));
712
713 // Optimize the subgraph
714 OptimizationViews optimizationViews;
715
716 // Check that the optimization is carried out correctly
717 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
718
719 // ===========================================================================================
720 // The expected results are:
721 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
722 // - No failed subgraphs
723 // - No untouched subgraphs
724 // ===========================================================================================
725
726 // -----------------------
727 // Check the substitutions
728 // -----------------------
729
730 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
731 BOOST_TEST(substitutions.size() == 1);
732
733 CheckSubstitution(substitutions.at(0),
734 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
735 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
736 subgraphInputSlots,
737 subgraphOutputSlots,
738 subgraphLayers);
739
740 // --------------------------
741 // Check the failed subgraphs
742 // --------------------------
743
744 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
745
746 // -----------------------------
747 // Check the untouched subgraphs
748 // -----------------------------
749
750 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
751}
752
753// A case with five layers (all convolutions) to optimize, all supported by the mock backend
754void FullyOptimizableSubgraphTestImpl2()
755{
756 Graph graph;
757 LayerNameToLayerMap layersInGraph;
758
759 // Create a fully optimizable subgraph
760 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
761 BOOST_TEST((subgraphPtr != nullptr));
762
763 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
764 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
765 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
766
767 BOOST_TEST(subgraphPtr->GetInputSlots().size() == 1);
768 BOOST_TEST(subgraphPtr->GetOutputSlots().size() == 1);
769 BOOST_TEST(subgraphPtr->GetLayers().size() == 5);
770
771 BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
772 BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
773 BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
774 BOOST_TEST(Contains(layersInGraph, "conv4 layer"));
775 BOOST_TEST(Contains(layersInGraph, "conv5 layer"));
776
777 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000778 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100779 auto backendObjPtr = CreateBackendObject(MockBackendId());
780 BOOST_TEST((backendObjPtr != nullptr));
781
782 // Optimize the subgraph
783 OptimizationViews optimizationViews;
784
785 // Check that the optimization is carried out correctly
786 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
787
788 // ===========================================================================================
789 // The expected results are:
790 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
791 // - No failed subgraphs
792 // - No untouched subgraphs
793 // ===========================================================================================
794
795 // -----------------------
796 // Check the substitutions
797 // -----------------------
798
799 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
800 BOOST_TEST(substitutions.size() == 1);
801
802 std::list<Layer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
803 layersInGraph.at("conv2 layer"),
804 layersInGraph.at("conv3 layer"),
805 layersInGraph.at("conv4 layer"),
806 layersInGraph.at("conv5 layer") };
807
808 const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
809
810 CheckSubstitution(substitution,
811 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
812 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
813 subgraphInputSlots,
814 subgraphOutputSlots,
815 expectedSubstitutableLayers);
816
817 const SubgraphView::Layers& substitutableSubgraphLayers = substitution.m_SubstitutableSubgraph.GetLayers();
818
819 BOOST_TEST(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
820 BOOST_TEST(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
821 BOOST_TEST(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
822 BOOST_TEST(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
823 BOOST_TEST(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
824
825 // --------------------------
826 // Check the failed subgraphs
827 // --------------------------
828
829 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
830
831 // -----------------------------
832 // Check the untouched subgraphs
833 // -----------------------------
834
835 BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
836}
837
// The input subgraph contains both supported and unsupported layers
// (but only convolutions are unsupported by the mock backend)
void PartiallySupportedSubgraphTestImpl()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially supported subgraph: supported convolutions interleaved with unsupported poolings
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 5);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling1 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling2 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer"));
    BOOST_TEST(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ========================================================================
    // The expected results are:
    //  - Exactly two substitutions, corresponding to the supported layers
    //  - Exactly two failed subgraphs, corresponding to the unsupported layers
    //  - No untouched subgraphs
    // ========================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 2);
    // Sort into a consistent order: the optimizer gives no ordering guarantee, so order the
    // substitutions by the name of the first layer in each substitutable subgraph
    std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
        return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
                      s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0;
    });

    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 1, 1, 1 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    // Expected slots/layers below are listed in the same sorted (by name) order as above
    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv2 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    BOOST_TEST(failedSubgraphs.size() == 2);
    // Sort into a consistent order (same rationale as for the substitutions above)
    std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
    });

    // pooling1+pooling2 are adjacent and so merge into one failed subgraph of two layers;
    // pooling3 stands alone
    std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
                                                                   { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedFailedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedFailedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedFailedLayers
    {
        { layersInGraph.at("pooling1 layer"),
          layersInGraph.at("pooling2 layer") },
        { layersInGraph.at("pooling3 layer") }
    };

    for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
    {
        CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
                            expectedFailedSubgraphSizes.at(failedIndex),
                            expectedFailedInputSlots.at(failedIndex),
                            expectedFailedOutputSlots.at(failedIndex),
                            expectedFailedLayers.at(failedIndex));
    }

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    BOOST_TEST(optimizationViews.GetUntouchedSubgraphs().empty());
}
968
969// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
970void FullyUnoptimizableSubgraphTestImpl1()
971{
972 Graph graph;
973 LayerNameToLayerMap layersInGraph;
974
975 // Create a fully optimizable subgraph
976 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
977 BOOST_TEST((subgraphPtr != nullptr));
978
979 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
980 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
981 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
982
983 BOOST_TEST(subgraphInputSlots.size() == 1);
984 BOOST_TEST(subgraphOutputSlots.size() == 1);
985 BOOST_TEST(subgraphLayers.size() == 1);
986
987 BOOST_TEST(Contains(layersInGraph, "conv layer unoptimizable"));
988
989 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000990 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100991 auto backendObjPtr = CreateBackendObject(MockBackendId());
992 BOOST_TEST((backendObjPtr != nullptr));
993
994 // Optimize the subgraph
995 OptimizationViews optimizationViews;
996
997 // Check that the optimization is carried out correctly
998 BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
999
1000 // ============================================================================
1001 // The expected results are:
1002 // - No substitutions
1003 // - No failed subgraphs
1004 // - Exactly one untouched subgraph, corresponding to the whole input subgraph
1005 // ============================================================================
1006
1007 // -----------------------
1008 // Check the substitutions
1009 // -----------------------
1010
1011 BOOST_TEST(optimizationViews.GetSubstitutions().empty());
1012
1013 // --------------------------
1014 // Check the failed subgraphs
1015 // --------------------------
1016
1017 BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());
1018
1019 // -----------------------------
1020 // Check the untouched subgraphs
1021 // -----------------------------
1022
1023 const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1024 BOOST_TEST(untouchedSubgraphs.size() == 1);
1025
1026 CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
1027 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
1028 subgraphInputSlots,
1029 subgraphOutputSlots,
1030 subgraphLayers);
1031}
1032
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
void PartiallyOptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph: conv1/conv3/conv5 are optimizable, conv2/conv4 are not
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    BOOST_TEST(subgraphInputSlots.size() == 1);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 5);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv4 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv5 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ===============================================================================
    // The expected results are:
    //  - Exactly three substitutions, corresponding to the optimizable layers
    //  - No failed subgraphs
    //  - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
    // ===============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 3);
    // Sort into a consistent order: the optimizer gives no ordering guarantee, so order the
    // substitutions by the name of the first layer in each substitutable subgraph
    std::sort(substitutions.begin(), substitutions.end(),
              [](auto s1, auto s2) { return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
                                                   s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0; });

    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 1, 1, 1 },
                                                                          { 1, 1, 1 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    // Expected slots/layers below are listed in the same sorted (by name) order as above
    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv3 layer") },
        { layersInGraph.at("conv5 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 2);
    // Sort into a consistent order (same rationale as for the substitutions above)
    std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
    });

    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
                                                                      { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") },
        { layersInGraph.at("conv4 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1165
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    BOOST_TEST((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    // Two input slots: the subgraph's inputs come from two different layers
    BOOST_TEST(subgraphInputSlots.size() == 2);
    BOOST_TEST(subgraphOutputSlots.size() == 1);
    BOOST_TEST(subgraphLayers.size() == 4);

    BOOST_TEST(Contains(layersInGraph, "conv1 layer"));
    BOOST_TEST(Contains(layersInGraph, "conv2 layer unoptimizable"));
    BOOST_TEST(Contains(layersInGraph, "conv3 layer"));
    BOOST_TEST(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    BOOST_TEST((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    BOOST_CHECK_NO_THROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    //  - Exactly one substitution, corresponding to the optimizable layers
    //  - No failed subgraphs
    //  - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    // ==============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    BOOST_TEST(substitutions.size() == 1);

    // The single substitution covers conv1, conv3 and the add layer: 2 inputs, 1 output,
    // 3 layers collapsed into 1 replacement layer
    ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 3 };
    ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };

    SubgraphView::InputSlots expectedSubstitutableInputSlots = {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0]),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])
    };
    SubgraphView::OutputSlots expectedSubstitutableOutputSlots =
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()[0])
    };
    SubgraphView::Layers expectedSubstitutableLayers
    {
        layersInGraph.at("conv1 layer"),
        layersInGraph.at("conv3 layer"),
        layersInGraph.at("add layer")
    };

    CheckSubstitution(substitutions[0],
                      expectedSubstitutableSubgraphSizes,
                      expectedReplacementSubgraphSizes,
                      expectedSubstitutableInputSlots,
                      expectedSubstitutableOutputSlots,
                      expectedSubstitutableLayers);

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    BOOST_TEST(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    BOOST_TEST(untouchedSubgraphs.size() == 1);

    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1276
1277} // Anonymous namespace
1278
BOOST_AUTO_TEST_SUITE(OptimizeSubGraph)

// Each test case simply runs one of the implementation functions defined in the anonymous namespace above
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph1) { FullyUnsupporteSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyUnsupportedSubgraph2) { FullyUnsupporteSubgraphTestImpl2(); }
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph1) { FullyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(FullyOptimizableSubgraph2) { FullyOptimizableSubgraphTestImpl2(); }
BOOST_AUTO_TEST_CASE(PartiallySupportedSubgraph) { PartiallySupportedSubgraphTestImpl(); }
BOOST_AUTO_TEST_CASE(FullyUnoptimizableSubgraph) { FullyUnoptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph1) { PartiallyOptimizableSubgraphTestImpl1(); }
BOOST_AUTO_TEST_CASE(PartiallyOptimizableSubgraph2) { PartiallyOptimizableSubgraphTestImpl2(); }

BOOST_AUTO_TEST_SUITE_END()