//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "CommonTestUtils.hpp"
#include "MockBackend.hpp"
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <armnn/BackendRegistry.hpp>

#include <doctest/doctest.h>

#include <algorithm>
#include <cstring>
#include <list>
#include <string>
#include <unordered_map>
#include <vector>

using namespace armnn;

namespace
{

// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  = 0;
    size_t m_NumOutputSlots = 0;
    size_t m_NumLayers      = 0;
};

// Keep the layers organized by layer name
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;

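// Note on the mock backend used throughout these tests (a sketch of its behaviour, as exercised below):
// Convolution2d layers are supported and are normally substituted with a single PreCompiled layer,
// layers whose name contains "unoptimizable" are supported but left untouched, and Pooling2d layers
// are rejected as unsupported.
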
// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    return const_cast<SlotType*>(&input);
}

// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs), array version
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    std::transform(input.begin(),
                   input.end(),
                   std::back_inserter(output),
                   [](const SlotType& inputItem)
                   {
                       return ConvertReferenceTypeToPointerType(inputItem);
                   });

    return output;
}

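// Typical usage (slotPtrs is a hypothetical name; the pattern matches the expected-value setup below):
//     std::vector<InputSlot*> slotPtrs = ConvertReferenceTypeToPointerType(layer->GetInputSlots());
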
// Convenience function to add an input layer to a graph
Layer* AddInputLayer(Graph& graph,
                     const std::string& layerName,
                     const TensorInfo& inputInfo,
                     LayerBindingId inputId = 0)
{
    Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
    CHECK(inputLayer);
    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    return inputLayer;
}

// Convenience function to add an output layer to a graph
Layer* AddOutputLayer(Graph& graph,
                      const std::string& layerName)
{
    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
    CHECK(outputLayer);
    return outputLayer;
}

// Convenience function to add a convolution layer to a graph
Convolution2dLayer* AddConvolutionLayer(Graph& graph,
                                        LayerNameToLayerMap& layersInGraph,
                                        const Convolution2dDescriptor& convolutionDescriptor,
                                        const std::string& layerName,
                                        const TensorInfo& weightInfo,
                                        const TensorInfo& biasInfo,
                                        const TensorInfo& outputInfo)
{
    Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
    CHECK(convLayer);
    SetWeightAndBias(convLayer, weightInfo, biasInfo);
    convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
    return convLayer;
}

// Convenience function to add a pooling layer to a graph
Pooling2dLayer* AddPoolingLayer(Graph& graph,
                                LayerNameToLayerMap& layersInGraph,
                                const Pooling2dDescriptor& poolingDescriptor,
                                const std::string& layerName,
                                const TensorInfo& outputInfo)
{
    Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
    CHECK(poolingLayer);
    poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
    return poolingLayer;
}

// Convenience function to add an addition layer to a graph
AdditionLayer* AddAdditionaLayer(Graph& graph,
                                 LayerNameToLayerMap& layersInGraph,
                                 const std::string& layerName,
                                 const TensorInfo& outputInfo)
{
    AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
    CHECK(additionLayer);
    additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
    return additionLayer;
}

// Convenience function to check that the given substitution matches the specified expected values
void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
                       const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
                       const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
                       const SubgraphView::InputSlots& expectedSubstitutableInputSlots,
                       const SubgraphView::OutputSlots& expectedSubstitutableOutputSlots,
                       const SubgraphView::Layers& expectedSubstitutableLayers)
{
    const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
    const SubgraphView::InputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetInputSlots();
    const SubgraphView::OutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetOutputSlots();
    const SubgraphView::Layers& substitutableSubgraphLayers = substitutableSubgraph.GetLayers();

    const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
    const SubgraphView::InputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetInputSlots();
    const SubgraphView::OutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetOutputSlots();
    const SubgraphView::Layers& replacementSubgraphLayers = replacementSubgraph.GetLayers();

    CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
    CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
    CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);

    CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
    CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
    CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));

    CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
    CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
    CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);

    CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
    CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
    CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));

    // The replacement subgraph is expected to consist solely of PreCompiled layers
    CHECK(std::all_of(replacementSubgraphLayers.begin(),
                      replacementSubgraphLayers.end(),
                      [](const Layer* layer)
                      {
                          return layer->GetType() == LayerType::PreCompiled;
                      }));
}

// Convenience function to check that the given failed subgraph matches the specified expected values
void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
                         const ExpectedSubgraphSize& expectedFailedSubgraphSize,
                         const SubgraphView::InputSlots& expectedFailedInputSlots,
                         const SubgraphView::OutputSlots& expectedFailedOutputSlots,
                         const SubgraphView::Layers& expectedFailedLayers)
{
    const SubgraphView::InputSlots& failedSubgraphInputSlots = failedSubgraph.GetInputSlots();
    const SubgraphView::OutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetOutputSlots();
    const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();

    CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
    CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
    CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);

    CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
    CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
    CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
}

// Convenience function to check that the given untouched subgraph matches the specified expected values
void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
                            const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
                            const SubgraphView::InputSlots& expectedUntouchedInputSlots,
                            const SubgraphView::OutputSlots& expectedUntouchedOutputSlots,
                            const SubgraphView::Layers& expectedUntouchedLayers)
{
    const SubgraphView::InputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetInputSlots();
    const SubgraphView::OutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetOutputSlots();
    const SubgraphView::Layers& untouchedSubgraphLayers = untouchedSubgraph.GetLayers();

    CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
    CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
    CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);

    CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
    CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
    CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
}

// Creates a subgraph containing only a single unsupported layer (pooling layers are not supported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                         "pooling layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
    poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({poolingLayer}),
                                  CreateOutputsFrom({poolingLayer}),
                                  {poolingLayer});
}

// Creates a subgraph containing only unsupported layers (pooling layers are not supported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({pooling1Layer}),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {pooling1Layer,
                                   pooling2Layer,
                                   pooling3Layer});
}

// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer});
}

// Creates a subgraph with five convolution layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({conv5Layer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   conv4Layer,
                                   conv5Layer});
}

// Creates a subgraph with both supported and unsupported layers
// (the convolutions are supported by the mock backend, the pooling layers are not)
SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", weightInfo, biasInfo, outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {conv1Layer,
                                   pooling1Layer,
                                   pooling2Layer,
                                   conv2Layer,
                                   pooling3Layer});
}

// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer unoptimizable", weightInfo, biasInfo,
                                                              outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({convLayer}),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer});
}

// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer}),
                                  CreateOutputsFrom({conv5Layer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   conv4Layer,
                                   conv5Layer});
}

// Creates a subgraph with an unoptimizable layer on one of its input branches ("unoptimizable" is added
// to the layer's name); this is meant to test input slots coming from different layers
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    const TensorInfo biasInfo  ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
    Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
    addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

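    // The network built above is therefore (roughly; only the convolutions and the addition belong to the subgraph):
    //
    //   input1 -> conv1 -----------------------------------\
    //                                                        add -> output
    //   input2 -> conv2 (unoptimizable) -> conv3 -----------/
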
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
                                                    conv2Layer}),
                                  CreateOutputsFrom({addLayer}),
                                  {conv1Layer,
                                   conv2Layer,
                                   conv3Layer,
                                   addLayer});
}

// The input subgraph contains only a single unsupported layer (pooling layers are not supported by the mock backend)
void FullyUnsupporteSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 1);

    CHECK(Contains(layersInGraph, "pooling layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    // - No substitutions
    // - Exactly one failed subgraph, corresponding to the whole original one
    // - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 1);

    CheckFailedSubgraph(failedSubgraphs.at(0),
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// The input subgraph contains only unsupported layers (pooling layers are not supported by the mock backend)
void FullyUnsupporteSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 3);

    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    // - No substitutions
    // - Exactly one failed subgraph, corresponding to the whole original one
    // - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 1);

    std::list<Layer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
                                            layersInGraph.at("pooling2 layer"),
                                            layersInGraph.at("pooling3 layer") };

    const SubgraphView& failedSubgraph = failedSubgraphs.at(0);

    CheckFailedSubgraph(failedSubgraph,
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    const SubgraphView::Layers& failedSubgraphLayers = failedSubgraph.GetLayers();

    // Check that the layers in the failed subgraph are the expected ones, in the expected order
    CHECK(std::equal(failedSubgraphLayers.begin(), failedSubgraphLayers.end(),
                     expectedFailedLayers.begin(), expectedFailedLayers.end()));

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// A simple case with only one layer (convolution) to optimize, supported by the mock backend
void FullyOptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 1);

    CHECK(Contains(layersInGraph, "conv layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ===========================================================================================
    // The expected results are:
    // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
    // - No failed subgraphs
    // - No untouched subgraphs
    // ===========================================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 1);

    CheckSubstitution(substitutions.at(0),
                      { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                      { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
                      subgraphInputSlots,
                      subgraphOutputSlots,
                      subgraphLayers);

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// A case with five layers (all convolutions) to optimize, all supported by the mock backend
void FullyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphPtr->GetInputSlots().size() == 1);
    CHECK(subgraphPtr->GetOutputSlots().size() == 1);
    CHECK(subgraphPtr->GetLayers().size() == 5);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "conv4 layer"));
    CHECK(Contains(layersInGraph, "conv5 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ===========================================================================================
    // The expected results are:
    // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
    // - No failed subgraphs
    // - No untouched subgraphs
    // ===========================================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 1);

    std::list<Layer*> expectedSubstitutableLayers{ layersInGraph.at("conv1 layer"),
                                                   layersInGraph.at("conv2 layer"),
                                                   layersInGraph.at("conv3 layer"),
                                                   layersInGraph.at("conv4 layer"),
                                                   layersInGraph.at("conv5 layer") };

    const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);

    CheckSubstitution(substitution,
                      { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                      { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
                      subgraphInputSlots,
                      subgraphOutputSlots,
                      expectedSubstitutableLayers);

    const SubgraphView::Layers& substitutableSubgraphLayers = substitution.m_SubstitutableSubgraph.GetLayers();

    // Check that the substitutable layers are the expected ones, in the expected order
    CHECK(std::equal(substitutableSubgraphLayers.begin(), substitutableSubgraphLayers.end(),
                     expectedSubstitutableLayers.begin(), expectedSubstitutableLayers.end()));

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// The input subgraph contains both supported and unsupported layers
// (the convolutions are supported by the mock backend, the pooling layers are not)
void PartiallySupportedSubgraphTestImpl()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially supported subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 5);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ========================================================================
    // The expected results are:
    // - Exactly two substitutions, corresponding to the supported layers
    // - Exactly two failed subgraphs, corresponding to the unsupported layers
    // - No untouched subgraphs
    // ========================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 2);
    // Sort into a consistent order
    std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
        return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
                      s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0;
    });

    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
                                                                          { 1, 1, 1 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedSubstitutableLayers
    {
        { layersInGraph.at("conv1 layer") },
        { layersInGraph.at("conv2 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 2);
    // Sort into a consistent order
    std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
    });

    std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
                                                                   { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedFailedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedFailedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedFailedLayers
    {
        { layersInGraph.at("pooling1 layer"),
          layersInGraph.at("pooling2 layer") },
        { layersInGraph.at("pooling3 layer") }
    };

    for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
    {
        CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
                            expectedFailedSubgraphSizes.at(failedIndex),
                            expectedFailedInputSlots.at(failedIndex),
                            expectedFailedOutputSlots.at(failedIndex),
                            expectedFailedLayers.at(failedIndex));
    }

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
void FullyUnoptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully unoptimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 1);

    CHECK(Contains(layersInGraph, "conv layer unoptimizable"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ============================================================================
    // The expected results are:
    // - No substitutions
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the whole input subgraph
    // ============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
                           { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                           subgraphInputSlots,
                           subgraphOutputSlots,
                           subgraphLayers);
}

1032// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1033void PartiallyOptimizableSubgraphTestImpl1()
1034{
1035 Graph graph;
1036 LayerNameToLayerMap layersInGraph;
1037
1038 // Create a fully optimizable subgraph
1039 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001040 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001041
1042 const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
1043 const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
1044 const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();
1045
Sadik Armagan1625efc2021-06-10 18:24:34 +01001046 CHECK(subgraphInputSlots.size() == 1);
1047 CHECK(subgraphOutputSlots.size() == 1);
1048 CHECK(subgraphLayers.size() == 5);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001049
Sadik Armagan1625efc2021-06-10 18:24:34 +01001050 CHECK(Contains(layersInGraph, "conv1 layer"));
1051 CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1052 CHECK(Contains(layersInGraph, "conv3 layer"));
1053 CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1054 CHECK(Contains(layersInGraph, "conv5 layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001055
1056 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001057 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001058 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001059 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001060
1061 // Optimize the subgraph
1062 OptimizationViews optimizationViews;
1063
1064 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001065 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001066
1067 // ===============================================================================
1068 // The expected results are:
1069 // - Exactly three substitutions, corresponding to the optimizable layers
1070 // - No failed subgraphs
1071 // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1072 // ===============================================================================
1073
1074 // -----------------------
1075 // Check the substitutions
1076 // -----------------------
1077
Rob Hughes30db8ad2019-11-08 15:50:10 +00001078 OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001079 CHECK(substitutions.size() == 3);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001080 // Sort into a consistent order
1081 std::sort(substitutions.begin(), substitutions.end(),
1082 [](auto s1, auto s2) { return strcmp(s1.m_SubstitutableSubgraph.GetLayers().front()->GetName(),
1083 s2.m_SubstitutableSubgraph.GetLayers().front()->GetName()) < 0; });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001084
1085 std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 1 },
1086 { 1, 1, 1 },
1087 { 1, 1, 1 } };
1088 std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1089 { 1, 1, 1 },
1090 { 1, 1, 1 } };
1091 std::vector<SubgraphView::InputSlots> expectedSubstitutableInputSlots
1092 {
1093 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()),
1094 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()),
1095 ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlots())
1096 };
1097 std::vector<SubgraphView::OutputSlots> expectedSubstitutableOutputSlots
1098 {
1099 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots()),
1100 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots()),
1101 ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots())
1102 };
1103 std::vector<SubgraphView::Layers> expectedSubstitutableLayers
1104 {
1105 { layersInGraph.at("conv1 layer") },
1106 { layersInGraph.at("conv3 layer") },
1107 { layersInGraph.at("conv5 layer") }
1108 };
1109
1110 for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1111 {
1112 CheckSubstitution(substitutions.at(substitutionIndex),
1113 expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1114 expectedReplacementSubgraphSizes.at(substitutionIndex),
1115 expectedSubstitutableInputSlots.at(substitutionIndex),
1116 expectedSubstitutableOutputSlots.at(substitutionIndex),
1117 expectedSubstitutableLayers.at(substitutionIndex));
1118 }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 2);
    // Sort into a consistent order
    std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetLayers().front()->GetName(), s2.GetLayers().front()->GetName()) < 0;
    });

    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 },
                                                                      { 1, 1, 1 } };
    std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetInputSlots())
    };
    std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
    {
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()),
        ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots())
    };
    std::vector<SubgraphView::Layers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable") },
        { layersInGraph.at("conv4 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}

// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
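// The subgraph is assumed to have two input branches: one feeding conv1 directly and one feeding
// conv3 through conv2 unoptimizable, with conv1 and conv3 joined by an addition layer, which is
// what the slot and layer counts checked below reflect.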
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::InputSlots& subgraphInputSlots = subgraphPtr->GetInputSlots();
    const SubgraphView::OutputSlots& subgraphOutputSlots = subgraphPtr->GetOutputSlots();
    const SubgraphView::Layers& subgraphLayers = subgraphPtr->GetLayers();

    CHECK(subgraphInputSlots.size() == 2);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 4);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    // - Exactly one substitution, corresponding to the optimizable layers
    // - No failed subgraphs
    // - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    // ==============================================================================
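    // Note: conv1, conv3 and the add layer form a single connected optimizable region with two
    // input slots (one per branch), so a single substitution is expected, while the unoptimizable
    // conv2 layer should come back as the only untouched subgraph.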
1206
1207 // -----------------------
1208 // Check the substitutions
1209 // -----------------------
1210
1211 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001212 CHECK(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001213
Rob Hughes30db8ad2019-11-08 15:50:10 +00001214 ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 3 };
1215 ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001216
Rob Hughes30db8ad2019-11-08 15:50:10 +00001217 SubgraphView::InputSlots expectedSubstitutableInputSlots = {
1218 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0]),
1219 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001220 };
Rob Hughes30db8ad2019-11-08 15:50:10 +00001221 SubgraphView::OutputSlots expectedSubstitutableOutputSlots =
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001222 {
Rob Hughes30db8ad2019-11-08 15:50:10 +00001223 ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()[0])
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001224 };
Rob Hughes30db8ad2019-11-08 15:50:10 +00001225 SubgraphView::Layers expectedSubstitutableLayers
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001226 {
Rob Hughes30db8ad2019-11-08 15:50:10 +00001227 layersInGraph.at("conv1 layer"),
1228 layersInGraph.at("conv3 layer"),
1229 layersInGraph.at("add layer")
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001230 };
1231
Rob Hughes30db8ad2019-11-08 15:50:10 +00001232 CheckSubstitution(substitutions[0],
1233 expectedSubstitutableSubgraphSizes,
1234 expectedReplacementSubgraphSizes,
1235 expectedSubstitutableInputSlots,
1236 expectedSubstitutableOutputSlots,
1237 expectedSubstitutableLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001238
1239 // --------------------------
1240 // Check the failed subgraphs
1241 // --------------------------
1242
Sadik Armagan1625efc2021-06-10 18:24:34 +01001243 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001244
1245 // -----------------------------
1246 // Check the untouched subgraphs
1247 // -----------------------------
1248
1249 const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001250 CHECK(untouchedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001251
1252 std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 1 } };
1253 std::vector<SubgraphView::InputSlots> expectedUntouchedInputSlots
1254 {
1255 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetInputSlots())
1256 };
1257 std::vector<SubgraphView::OutputSlots> expectedUntouchedOutputSlots
1258 {
1259 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())
1260 };
1261 std::vector<SubgraphView::Layers> expectedUntouchedLayers
1262 {
1263 { layersInGraph.at("conv2 layer unoptimizable") }
1264 };
1265
1266 for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1267 {
1268 CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1269 expectedUntouchedSubgraphSizes.at(untouchedIndex),
1270 expectedUntouchedInputSlots.at(untouchedIndex),
1271 expectedUntouchedOutputSlots.at(untouchedIndex),
1272 expectedUntouchedLayers.at(untouchedIndex));
1273 }
1274}
1275
1276} // Anonymous namespace
1277
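// doctest entry points for the test implementations defined in the anonymous namespace above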
TEST_SUITE("OptimizeSubGraph")
{
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}