blob: 997fe9850d193e9645394e7341e53e2f7331d0d3 [file] [log] [blame]
//
// Copyright © 2017,2022 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <CommonTestUtils.hpp>
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <armnn/BackendRegistry.hpp>
#include <armnnTestUtils/MockBackend.hpp>

#include <doctest/doctest.h>
#include <unordered_map>
17
using namespace armnn;

namespace
{
22
// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  = 0; // Expected number of input slots exposed by the subgraph view
    size_t m_NumOutputSlots = 0; // Expected number of output slots exposed by the subgraph view
    size_t m_NumLayers      = 0; // Expected number of layers contained in the subgraph view
};
30
// Keep the layers organized by layer name, so tests can look a layer up by the
// name it was created with
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
33
// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs).
// The const_cast is deliberate: graphs hand out const references, while subgraphs
// store mutable pointers to the very same slots.
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    auto* mutableSlot = const_cast<SlotType*>(&input);
    return mutableSlot;
}
41
// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs), array version.
// Delegates the per-element conversion to the scalar overload above.
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> pointers;
    pointers.reserve(input.size());
    for (const SlotType& item : input)
    {
        pointers.push_back(ConvertReferenceTypeToPointerType(item));
    }
    return pointers;
}
58
Francis Murtagh56ccf682021-12-13 18:48:12 +000059// Convert from vector of Slots* (Input/Output) to vector of ISlots* (IInput/IOutput)
60template <typename SlotType, typename ResultSlotType>
61std::vector<ResultSlotType*> ConvertSlotsToISlots(const std::vector<SlotType*> input)
62{
63 std::vector<ResultSlotType*> output;
64 for (auto slot : input)
65 {
66 output.push_back(PolymorphicDowncast<ResultSlotType*>(slot));
67 }
68 return output;
69}
70
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010071// Convenience function to add an input layer to a graph
72Layer* AddInputLayer(Graph& graph,
73 const std::string& layerName,
74 const TensorInfo& inputInfo,
75 LayerBindingId inputId = 0)
76{
77 Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +010078 CHECK(inputLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010079 inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
80 return inputLayer;
81}
82
83// Convenience function to add an output layer to a graph
84Layer* AddOutputLayer(Graph& graph,
85 const std::string& layerName)
86{
87 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +010088 CHECK(outputLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010089 return outputLayer;
90}
91
92// Convenience function to add a convolution layer to a graph
93Convolution2dLayer* AddConvolutionLayer(Graph& graph,
94 LayerNameToLayerMap& layersInGraph,
95 const Convolution2dDescriptor& convolutionDescriptor,
96 const std::string& layerName,
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010097 const TensorInfo& outputInfo)
98{
99 Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100100 CHECK(convLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100101 convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
102 layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
103 return convLayer;
104}
105
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100106// Convenience function to add a constant layer to a graph
107ConstantLayer* AddConstantLayer(Graph& graph,
108 LayerNameToLayerMap& layersInGraph,
109 const std::string& layerName,
110 const ConstTensor& constTensor,
111 const TensorInfo& outputInfo)
112{
113 ConstantLayer* const constantLayer = graph.AddLayer<ConstantLayer>(layerName.c_str());
114 CHECK(constantLayer);
115 constantLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
116 constantLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
117 layersInGraph.insert(std::make_pair(constantLayer->GetName(), constantLayer));
118 return constantLayer;
119}
120
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100121// Convenience function to add a pooling layer to a graph
122Pooling2dLayer* AddPoolingLayer(Graph& graph,
123 LayerNameToLayerMap& layersInGraph,
124 const Pooling2dDescriptor& poolingDescriptor,
125 const std::string& layerName,
126 const TensorInfo& outputInfo)
127{
128 Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100129 CHECK(poolingLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100130 poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
131 layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
132 return poolingLayer;
133}
134
135// Convenience function to add an addition layer to a graph
136AdditionLayer* AddAdditionaLayer(Graph& graph,
137 LayerNameToLayerMap& layersInGraph,
138 const std::string& layerName,
139 const TensorInfo& outputInfo)
140{
141 AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100142 CHECK(additionLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100143 additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
144 layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
145 return additionLayer;
146}
147
148// Convenience function to check that the given substitution matches the specified expected values
149void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
150 const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
151 const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000152 const SubgraphView::IInputSlots& expectedSubstitutableInputSlots,
153 const SubgraphView::IOutputSlots& expectedSubstitutableOutputSlots,
154 const SubgraphView::IConnectableLayers& expectedSubstitutableLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100155{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000156 const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
157 const SubgraphView::IInputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetIInputSlots();
158 const SubgraphView::IOutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetIOutputSlots();
159 const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
160 substitutableSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100161
Francis Murtagh56ccf682021-12-13 18:48:12 +0000162 const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
163 const SubgraphView::IInputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetIInputSlots();
164 const SubgraphView::IOutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetIOutputSlots();
165 const SubgraphView::IConnectableLayers& replacementSubgraphLayers = replacementSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100166
Sadik Armagan1625efc2021-06-10 18:24:34 +0100167 CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
168 CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
169 CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100170
Sadik Armagan1625efc2021-06-10 18:24:34 +0100171 CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
172 CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
173 CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100174
Sadik Armagan1625efc2021-06-10 18:24:34 +0100175 CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
176 CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
177 CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100178
Sadik Armagan1625efc2021-06-10 18:24:34 +0100179 CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
180 CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
181 CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100182
Sadik Armagan1625efc2021-06-10 18:24:34 +0100183 CHECK(std::all_of(replacementSubgraphLayers.begin(),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100184 replacementSubgraphLayers.end(),
Francis Murtagh56ccf682021-12-13 18:48:12 +0000185 [](const IConnectableLayer* layer)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100186 {
187 return layer->GetType() == LayerType::PreCompiled;
188 }));
189}
190
191// Convenience function to check that the given failed subgraph matches the specified expected values
192void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
193 const ExpectedSubgraphSize& expectedFailedSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000194 const SubgraphView::IInputSlots& expectedFailedInputSlots,
195 const SubgraphView::IOutputSlots& expectedFailedOutputSlots,
196 const SubgraphView::IConnectableLayers& expectedFailedLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100197{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000198 const SubgraphView::IInputSlots& failedSubgraphInputSlots = failedSubgraph.GetIInputSlots();
199 const SubgraphView::IOutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetIOutputSlots();
200 const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100201
Sadik Armagan1625efc2021-06-10 18:24:34 +0100202 CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
203 CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
204 CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100205
Sadik Armagan1625efc2021-06-10 18:24:34 +0100206 CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
207 CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
208 CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100209}
210
211// Convenience function to check that the given untouched subgraph matches the specified expected values
212void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
213 const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000214 const SubgraphView::IInputSlots& expectedUntouchedInputSlots,
215 const SubgraphView::IOutputSlots& expectedUntouchedOutputSlots,
216 const SubgraphView::IConnectableLayers& expectedUntouchedLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100217{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000218 const SubgraphView::IInputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetIInputSlots();
219 const SubgraphView::IOutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetIOutputSlots();
220 const SubgraphView::IConnectableLayers& untouchedSubgraphLayers = untouchedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100221
Sadik Armagan1625efc2021-06-10 18:24:34 +0100222 CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
223 CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
224 CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100225
Sadik Armagan1625efc2021-06-10 18:24:34 +0100226 CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
227 CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
228 CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100229}
230
// Creates a subgraph containing only a single unsupported layer
// (only convolutions are supported by the mock backend, so a pooling layer is unsupported)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // All tensors in these tests are quantized 8-bit, shape 1x16x16x16 (NHWC)
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> pooling -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                         "pooling layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
    poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    // (only the pooling layer belongs to the subgraph; input/output layers stay outside)
    return CreateSubgraphViewFrom(CreateInputsFrom(poolingLayer),
                                  CreateOutputsFrom({poolingLayer}),
                                  {poolingLayer});
}
265
// Creates a subgraph containing only unsupported layers
// (only convolutions are supported by the mock backend, so pooling layers are unsupported)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: input -> pooling1 -> pooling2 -> pooling3 -> output
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    // (the three pooling layers form the subgraph; input/output layers stay outside)
    return CreateSubgraphViewFrom(CreateInputsFrom(pooling1Layer),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {pooling1Layer,
                                   pooling2Layer,
                                   pooling3Layer});
}
308
// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights and bias are fed in through ConstantLayers, so their infos must be marked constant
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Dummy tensor data. The float vectors are sized so their byte count matches the tensor
    // infos (64 floats = 256 bytes = 16x1x1x16 QAsymmU8 weights; 16 floats = 64 bytes =
    // 16 Signed32 biases) — presumably ConstTensor only checks sizes in bytes; TODO confirm.
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer", outputInfo);

    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer = AddConstantLayer(graph, layersInGraph, "Bias Layer", constBiasTensor, biasInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: slot 0 is data, slots 1/2 are weights/bias
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Slots 1 and 2 are fed by constant layers that are themselves part of the subgraph,
    // so they must not be treated as subgraph inputs
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}
355
// Creates a subgraph with five convolutions layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights and bias are fed in through ConstantLayers, so their infos must be marked constant
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Dummy tensor data, sized so the byte count matches the tensor infos;
    // the same tensors are shared by all five convolutions
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: a chain of five convolutions, each with its own
    // constant weights and bias layers
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, biasInfo);


    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer", outputInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", outputInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, biasInfo);


    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: for each convolution, slot 0 is data, slots 1/2 are weights/bias
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    // Weight/bias input slots are fed by constant layers inside the subgraph
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    // (the order of the layer list is significant: tests compare it against expectations)
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({ conv5Layer }),
                                  { weightsLayer1,
                                    biasLayer1,
                                    conv1Layer,
                                    weightsLayer2,
                                    biasLayer2,
                                    conv2Layer,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv3Layer,
                                    weightsLayer4,
                                    biasLayer4,
                                    conv4Layer,
                                    weightsLayer5,
                                    biasLayer5,
                                    conv5Layer });
}
462
// Creates a subgraph with both supported and unsupported layers
// (only convolutions are supported by the mock backend; the pooling layers are not)
SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights and bias are fed in through ConstantLayers, so their infos must be marked constant
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // Dummy tensor data, sized so the byte count matches the tensor infos
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth = 2;
    poolingDescriptor.m_PoolHeight = 2;
    poolingDescriptor.m_StrideX = 2;
    poolingDescriptor.m_StrideY = 2;
    poolingDescriptor.m_PadLeft = 1;
    poolingDescriptor.m_PadRight = 1;
    poolingDescriptor.m_PadTop = 1;
    poolingDescriptor.m_PadBottom = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: conv1 -> pooling1 -> pooling2 -> conv2 -> pooling3
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);

    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);

    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, weightInfo);

    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: for each convolution, slot 0 is data, slots 1/2 are weights/bias
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
    conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
    conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Weight/bias input slots are fed by constant layers inside the subgraph
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    // (the order of the layer list is significant: tests compare it against expectations)
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {weightsLayer1,
                                   biasLayer1,
                                   conv1Layer,
                                   pooling1Layer,
                                   pooling2Layer,
                                   weightsLayer2,
                                   biasLayer2,
                                   conv2Layer,
                                   pooling3Layer});
}
553
554// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
555SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
556{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000557 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
558 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100559 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
560 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100561
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100562 weightInfo.SetConstant(true);
563 biasInfo.SetConstant(true);
564
565 std::vector<float> weightsVector(64);
566 ConstTensor constWeightsTensor(weightInfo, weightsVector);
567
568 std::vector<float> biasVector(16);
569 ConstTensor constBiasTensor(biasInfo, biasVector);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100570 Convolution2dDescriptor convolutionDescriptor;
571 convolutionDescriptor.m_StrideX = 1;
572 convolutionDescriptor.m_StrideY = 1;
573 convolutionDescriptor.m_BiasEnabled = true;
574 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
575
576 // Construct the graph
577 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100578
579 ConstantLayer* const weightsLayer =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000580 AddConstantLayer(graph, layersInGraph, "Weights Layer unoptimizable", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100581
582 ConstantLayer* const biasLayer =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000583 AddConstantLayer(graph, layersInGraph, "Bias Layer unoptimizable", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100584
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100585 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000586 "conv layer unoptimizable", outputInfo);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100587 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
588
589 // Connect the network
590 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100591 weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
592 biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100593 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
594
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100595 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100596 // Create the subgraph view for the whole network
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100597 return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100598 CreateOutputsFrom({convLayer}),
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100599 {convLayer, weightsLayer, biasLayer});
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100600}
601
602// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
603SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
604{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000605 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
606 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100607 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
608 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
609
610 weightInfo.SetConstant(true);
611 biasInfo.SetConstant(true);
612
613 std::vector<float> weightsVector(64);
614 ConstTensor constWeightsTensor(weightInfo, weightsVector);
615
616 std::vector<float> biasVector(16);
617 ConstTensor constBiasTensor(biasInfo, biasVector);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100618
619 Convolution2dDescriptor convolutionDescriptor;
620 convolutionDescriptor.m_StrideX = 1;
621 convolutionDescriptor.m_StrideY = 1;
622 convolutionDescriptor.m_BiasEnabled = true;
623 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
624
625 // Construct the graph
626 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100627
628 ConstantLayer* const weightsLayer1 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000629 AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100630 ConstantLayer* const biasLayer1 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000631 AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100632 ConstantLayer* const weightsLayer2 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000633 AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100634 ConstantLayer* const biasLayer2 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000635 AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100636 ConstantLayer* const weightsLayer3 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000637 AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100638 ConstantLayer* const biasLayer3 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000639 AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100640 ConstantLayer* const weightsLayer4 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000641 AddConstantLayer(graph, layersInGraph, "Weights Layer 4 unoptimizable", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100642 ConstantLayer* const biasLayer4 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000643 AddConstantLayer(graph, layersInGraph, "Bias Layer 4 unoptimizable", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100644 ConstantLayer* const weightsLayer5 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000645 AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100646 ConstantLayer* const biasLayer5 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000647 AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100648
649 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000650 "conv1 layer", outputInfo);
651 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
652 "conv2 layer unoptimizable", outputInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100653 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000654 "conv3 layer", outputInfo);
655 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
656 "conv4 layer unoptimizable", outputInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100657 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000658 "conv5 layer", outputInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100659
660 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100661
662 // Connect the network
663 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100664 weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
665 biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
666
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100667 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100668 weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
669 biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
670
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100671 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100672 weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
673 biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
674
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100675 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100676 weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
677 biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));
678
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100679 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100680 weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
681 biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));
682
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100683 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
684
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100685 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100686 // Create the subgraph view for the whole network
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100687 return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100688 CreateOutputsFrom({conv5Layer}),
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100689 {weightsLayer1,
690 biasLayer1,
691 conv1Layer,
692 weightsLayer2,
693 biasLayer2,
694 conv2Layer,
695 weightsLayer3,
696 biasLayer3,
697 conv3Layer,
698 weightsLayer4,
699 biasLayer4,
700 conv4Layer,
701 weightsLayer5,
702 biasLayer5,
703 conv5Layer});
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100704}
705
706// Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
707// this is meant to test input slots coming from different layers
708SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
709{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000710 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
711 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100712 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
713 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
714
715 weightInfo.SetConstant(true);
716 biasInfo.SetConstant(true);
717
718 std::vector<float> weightsVector(64);
719 ConstTensor constWeightsTensor(weightInfo, weightsVector);
720
721 std::vector<float> biasVector(16);
722 ConstTensor constBiasTensor(biasInfo, biasVector);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100723
724 Convolution2dDescriptor convolutionDescriptor;
725 convolutionDescriptor.m_StrideX = 1;
726 convolutionDescriptor.m_StrideY = 1;
727 convolutionDescriptor.m_BiasEnabled = true;
728 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
729
730 // Construct the graph
731 Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
732 Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100733
734 ConstantLayer* const weightsLayer1 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000735 AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100736 ConstantLayer* const biasLayer1 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000737 AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100738 ConstantLayer* const weightsLayer2 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000739 AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100740 ConstantLayer* const biasLayer2 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000741 AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100742 ConstantLayer* const weightsLayer3 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000743 AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100744 ConstantLayer* const biasLayer3 =
Mike Kellyec67a0f2022-11-25 13:55:24 +0000745 AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100746
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100747 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000748 "conv1 layer", outputInfo);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100749 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000750 "conv2 layer unoptimizable", outputInfo);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100751 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
Mike Kellyec67a0f2022-11-25 13:55:24 +0000752 "conv3 layer", outputInfo);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100753 AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
754 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
755
756 // Connect the network
757 input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100758 weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
759 biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100760 conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100761
762 input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
763 weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
764 biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100765 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100766 weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
767 biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100768 conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100769
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100770 addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
771
772 // Create the subgraph view for the whole network
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100773 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100774 return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100775 conv2Layer}, ignoreSlots),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100776 CreateOutputsFrom({addLayer}),
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100777 { weightsLayer1,
778 biasLayer1,
779 weightsLayer2,
780 biasLayer2,
781 weightsLayer3,
782 biasLayer3,
783 conv1Layer,
784 conv2Layer,
785 conv3Layer,
786 addLayer });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100787}
788
789// The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
790void FullyUnsupporteSubgraphTestImpl1()
791{
792 Graph graph;
793 LayerNameToLayerMap layersInGraph;
794
795 // Create an unsupported subgraph
796 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100797 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100798
Francis Murtagh56ccf682021-12-13 18:48:12 +0000799 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
800 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
801 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100802
Sadik Armagan1625efc2021-06-10 18:24:34 +0100803 CHECK(subgraphInputSlots.size() == 1);
804 CHECK(subgraphOutputSlots.size() == 1);
805 CHECK(subgraphLayers.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100806
Sadik Armagan1625efc2021-06-10 18:24:34 +0100807 CHECK(Contains(layersInGraph, "pooling layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100808
809 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000810 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100811 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100812 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100813
814 // Optimize the subgraph
815 OptimizationViews optimizationViews;
816
817 // Check that the optimization is carried out correctly, but no optimization is performed
Sadik Armagan1625efc2021-06-10 18:24:34 +0100818 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100819
820 // =======================================================================
821 // The expected results are:
822 // - No substitutions
823 // - Exactly one failed subgraph, corresponding to the whole original one
824 // - No untouched subgraphs
825 // =======================================================================
826
827 // -----------------------
828 // Check the substitutions
829 // -----------------------
830
Sadik Armagan1625efc2021-06-10 18:24:34 +0100831 CHECK(optimizationViews.GetSubstitutions().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100832
833 // --------------------------
834 // Check the failed subgraphs
835 // --------------------------
836
837 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100838 CHECK(failedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100839
840 CheckFailedSubgraph(failedSubgraphs.at(0),
841 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
842 subgraphInputSlots,
843 subgraphOutputSlots,
844 subgraphLayers);
845
846 // -----------------------------
847 // Check the untouched subgraphs
848 // -----------------------------
849
Sadik Armagan1625efc2021-06-10 18:24:34 +0100850 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100851}
852
853// The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
854void FullyUnsupporteSubgraphTestImpl2()
855{
856 Graph graph;
857 LayerNameToLayerMap layersInGraph;
858
859 // Create an unsupported subgraph
860 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100861 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100862
Francis Murtagh56ccf682021-12-13 18:48:12 +0000863 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
864 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
865 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100866
Sadik Armagan1625efc2021-06-10 18:24:34 +0100867 CHECK(subgraphInputSlots.size() == 1);
868 CHECK(subgraphOutputSlots.size() == 1);
869 CHECK(subgraphLayers.size() == 3);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100870
Sadik Armagan1625efc2021-06-10 18:24:34 +0100871 CHECK(Contains(layersInGraph, "pooling1 layer"));
872 CHECK(Contains(layersInGraph, "pooling2 layer"));
873 CHECK(Contains(layersInGraph, "pooling3 layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100874
875 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000876 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100877 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100878 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100879
880 // Optimize the subgraph
881 OptimizationViews optimizationViews;
882
883 // Check that the optimization is carried out correctly, but no optimization is performed
Sadik Armagan1625efc2021-06-10 18:24:34 +0100884 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100885
886 // =======================================================================
887 // The expected results are:
888 // - No substitutions
889 // - Exactly one failed subgraph, corresponding to the whole original one
890 // - No untouched subgraphs
891 // =======================================================================
892
893 // -----------------------
894 // Check the substitutions
895 // -----------------------
896
Sadik Armagan1625efc2021-06-10 18:24:34 +0100897 CHECK(optimizationViews.GetSubstitutions().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100898
899 // --------------------------
900 // Check the failed subgraphs
901 // --------------------------
902
903 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100904 CHECK(failedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100905
Francis Murtagh56ccf682021-12-13 18:48:12 +0000906 std::list<IConnectableLayer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
Sadik Armagan1625efc2021-06-10 18:24:34 +0100907 layersInGraph.at("pooling2 layer"),
908 layersInGraph.at("pooling3 layer") };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100909
910 const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
911
912 CheckFailedSubgraph(failedSubgraph,
913 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
914 subgraphInputSlots,
915 subgraphOutputSlots,
916 subgraphLayers);
917
Francis Murtagh56ccf682021-12-13 18:48:12 +0000918 const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100919
Sadik Armagan1625efc2021-06-10 18:24:34 +0100920 CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
921 CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
922 CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100923
924 // -----------------------------
925 // Check the untouched subgraphs
926 // -----------------------------
927
Sadik Armagan1625efc2021-06-10 18:24:34 +0100928 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100929}
930
931// A simple case with only one layer (convolution) to optimize, supported by the mock backend
932void FullyOptimizableSubgraphTestImpl1()
933{
934 Graph graph;
935 LayerNameToLayerMap layersInGraph;
936
937 // Create a fully optimizable subgraph
938 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100939 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100940
Francis Murtagh56ccf682021-12-13 18:48:12 +0000941 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
942 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
943 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100944
Sadik Armagan1625efc2021-06-10 18:24:34 +0100945 CHECK(subgraphInputSlots.size() == 1);
946 CHECK(subgraphOutputSlots.size() == 1);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100947 CHECK(subgraphLayers.size() == 3);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100948
Sadik Armagan1625efc2021-06-10 18:24:34 +0100949 CHECK(Contains(layersInGraph, "conv layer"));
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100950 CHECK(Contains(layersInGraph, "Weights Layer"));
951 CHECK(Contains(layersInGraph, "Bias Layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100952
953 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000954 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100955 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100956 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100957
958 // Optimize the subgraph
959 OptimizationViews optimizationViews;
960
961 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +0100962 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100963
964 // ===========================================================================================
965 // The expected results are:
966 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
967 // - No failed subgraphs
968 // - No untouched subgraphs
969 // ===========================================================================================
970
971 // -----------------------
972 // Check the substitutions
973 // -----------------------
974
975 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100976 CHECK(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100977
978 CheckSubstitution(substitutions.at(0),
979 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
980 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
981 subgraphInputSlots,
982 subgraphOutputSlots,
983 subgraphLayers);
984
985 // --------------------------
986 // Check the failed subgraphs
987 // --------------------------
988
Sadik Armagan1625efc2021-06-10 18:24:34 +0100989 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100990
991 // -----------------------------
992 // Check the untouched subgraphs
993 // -----------------------------
994
Sadik Armagan1625efc2021-06-10 18:24:34 +0100995 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100996}
997
998// A case with five layers (all convolutions) to optimize, all supported by the mock backend
999void FullyOptimizableSubgraphTestImpl2()
1000{
1001 Graph graph;
1002 LayerNameToLayerMap layersInGraph;
1003
1004 // Create a fully optimizable subgraph
1005 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001006 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001007
Francis Murtagh56ccf682021-12-13 18:48:12 +00001008 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1009 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1010 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001011
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001012 CHECK(subgraphInputSlots.size() == 1);
1013 CHECK(subgraphOutputSlots.size() == 1);
1014 CHECK(subgraphPtr->GetIConnectableLayers().size() == 15);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001015
Sadik Armagan1625efc2021-06-10 18:24:34 +01001016 CHECK(Contains(layersInGraph, "conv1 layer"));
1017 CHECK(Contains(layersInGraph, "conv2 layer"));
1018 CHECK(Contains(layersInGraph, "conv3 layer"));
1019 CHECK(Contains(layersInGraph, "conv4 layer"));
1020 CHECK(Contains(layersInGraph, "conv5 layer"));
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001021 CHECK(Contains(layersInGraph, "Weights Layer 1"));
1022 CHECK(Contains(layersInGraph, "Weights Layer 2"));
1023 CHECK(Contains(layersInGraph, "Weights Layer 3"));
1024 CHECK(Contains(layersInGraph, "Weights Layer 4"));
1025 CHECK(Contains(layersInGraph, "Weights Layer 5"));
1026 CHECK(Contains(layersInGraph, "Bias Layer 1"));
1027 CHECK(Contains(layersInGraph, "Bias Layer 2"));
1028 CHECK(Contains(layersInGraph, "Bias Layer 3"));
1029 CHECK(Contains(layersInGraph, "Bias Layer 4"));
1030 CHECK(Contains(layersInGraph, "Bias Layer 5"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001031
1032 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001033 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001034 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001035 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001036
1037 // Optimize the subgraph
1038 OptimizationViews optimizationViews;
1039
1040 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001041 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001042
1043 // ===========================================================================================
1044 // The expected results are:
1045 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
1046 // - No failed subgraphs
1047 // - No untouched subgraphs
1048 // ===========================================================================================
1049
1050 // -----------------------
1051 // Check the substitutions
1052 // -----------------------
1053
1054 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001055 CHECK(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001056
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001057 std::list<IConnectableLayer*> expectedSubstitutableLayers{
1058 layersInGraph.at("Weights Layer 1"),
1059 layersInGraph.at("Weights Layer 2"),
1060 layersInGraph.at("Weights Layer 3"),
1061 layersInGraph.at("Weights Layer 4"),
1062 layersInGraph.at("Weights Layer 5"),
1063 layersInGraph.at("Bias Layer 1"),
1064 layersInGraph.at("Bias Layer 2"),
1065 layersInGraph.at("Bias Layer 3"),
1066 layersInGraph.at("Bias Layer 4"),
1067 layersInGraph.at("Bias Layer 5"),
1068 layersInGraph.at("conv1 layer"),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001069 layersInGraph.at("conv2 layer"),
1070 layersInGraph.at("conv3 layer"),
1071 layersInGraph.at("conv4 layer"),
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001072 layersInGraph.at("conv5 layer")};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001073
1074 const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
1075
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001076 CheckSubstitution(
1077 substitution,
1078 {subgraphInputSlots.size(), subgraphOutputSlots.size(),
1079 subgraphLayers.size()},
1080 {subgraphInputSlots.size(), subgraphOutputSlots.size(), 1},
1081 subgraphInputSlots, subgraphOutputSlots, expectedSubstitutableLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001082
Francis Murtagh56ccf682021-12-13 18:48:12 +00001083 const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
1084 substitution.m_SubstitutableSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001085
Sadik Armagan1625efc2021-06-10 18:24:34 +01001086 CHECK_EQ(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
1087 CHECK_EQ(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
1088 CHECK_EQ(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
1089 CHECK_EQ(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
1090 CHECK_EQ(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001091
1092 // --------------------------
1093 // Check the failed subgraphs
1094 // --------------------------
1095
Sadik Armagan1625efc2021-06-10 18:24:34 +01001096 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001097
1098 // -----------------------------
1099 // Check the untouched subgraphs
1100 // -----------------------------
1101
Sadik Armagan1625efc2021-06-10 18:24:34 +01001102 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001103}
1104
1105// The input subgraph contaions both supported and unsupported layers
1106// (but only convolutions are unsupported by the mock backend)
1107void PartiallySupportedSubgraphTestImpl()
1108{
1109 Graph graph;
1110 LayerNameToLayerMap layersInGraph;
1111
1112 // Create a fully optimizable subgraph
1113 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001114 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001115
Francis Murtagh56ccf682021-12-13 18:48:12 +00001116 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1117 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1118 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001119
Sadik Armagan1625efc2021-06-10 18:24:34 +01001120 CHECK(subgraphInputSlots.size() == 1);
1121 CHECK(subgraphOutputSlots.size() == 1);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001122 CHECK(subgraphLayers.size() == 9);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001123
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001124 CHECK(Contains(layersInGraph, "Weights Layer 1"));
1125 CHECK(Contains(layersInGraph, "Bias Layer 1"));
Sadik Armagan1625efc2021-06-10 18:24:34 +01001126 CHECK(Contains(layersInGraph, "conv1 layer"));
1127 CHECK(Contains(layersInGraph, "pooling1 layer"));
1128 CHECK(Contains(layersInGraph, "pooling2 layer"));
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001129 CHECK(Contains(layersInGraph, "Weights Layer 2"));
1130 CHECK(Contains(layersInGraph, "Bias Layer 2"));
Sadik Armagan1625efc2021-06-10 18:24:34 +01001131 CHECK(Contains(layersInGraph, "conv2 layer"));
1132 CHECK(Contains(layersInGraph, "pooling3 layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001133
1134 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001135 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001136 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001137 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001138
1139 // Optimize the subgraph
1140 OptimizationViews optimizationViews;
1141
1142 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001143 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001144
1145 // ========================================================================
1146 // The expected results are:
1147 // - Exactly two substitution, corresponding to the supported layers
1148 // - Exactly two failed subgraphs, corresponding to the unsupported layers
1149 // - No untouched subgraphs
1150 // ========================================================================
1151
1152 // -----------------------
1153 // Check the substitutions
1154 // -----------------------
1155
Rob Hughes30db8ad2019-11-08 15:50:10 +00001156 OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001157 CHECK(substitutions.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001158 // Sort into a consistent order
1159 std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001160 return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1161 s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0;
Rob Hughes30db8ad2019-11-08 15:50:10 +00001162 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001163
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001164 std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
1165 { 1, 1, 3 } };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001166 std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1167 { 1, 1, 1 } };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001168 std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001169 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001170 ConvertSlotsToISlots<InputSlot, IInputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001171 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
Francis Murtagh56ccf682021-12-13 18:48:12 +00001172 ConvertSlotsToISlots<InputSlot, IInputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001173 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlot(0))})
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001174 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001175
1176 std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001177 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001178 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1179 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1180 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1181 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots()))
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001182 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001183 std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001184 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001185 { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
1186 { layersInGraph.at("Weights Layer 2"), layersInGraph.at("Bias Layer 2"), layersInGraph.at("conv2 layer") }
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001187 };
1188
1189 for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1190 {
1191 CheckSubstitution(substitutions.at(substitutionIndex),
1192 expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1193 expectedReplacementSubgraphSizes.at(substitutionIndex),
1194 expectedSubstitutableInputSlots.at(substitutionIndex),
1195 expectedSubstitutableOutputSlots.at(substitutionIndex),
1196 expectedSubstitutableLayers.at(substitutionIndex));
1197 }
1198
1199 // --------------------------
1200 // Check the failed subgraphs
1201 // --------------------------
1202
Rob Hughes30db8ad2019-11-08 15:50:10 +00001203 OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001204 CHECK(failedSubgraphs.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001205 // Sort into a consistent order
1206 std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001207 return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1208 s2.GetIConnectableLayers().front()->GetName()) < 0;
Rob Hughes30db8ad2019-11-08 15:50:10 +00001209 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001210
1211 std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
1212 { 1, 1, 1 } };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001213 std::vector<SubgraphView::IInputSlots> expectedFailedInputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001214 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001215 ConvertSlotsToISlots<InputSlot, IInputSlot>(
1216 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots())),
1217 ConvertSlotsToISlots<InputSlot, IInputSlot>(
1218 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots()))
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001219 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001220 std::vector<SubgraphView::IOutputSlots> expectedFailedOutputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001221 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001222 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1223 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots())),
1224 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1225 ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots()))
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001226 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001227 std::vector<SubgraphView::IConnectableLayers> expectedFailedLayers
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001228 {
1229 { layersInGraph.at("pooling1 layer"),
1230 layersInGraph.at("pooling2 layer") },
1231 { layersInGraph.at("pooling3 layer") }
1232 };
1233
1234 for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
1235 {
1236 CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
1237 expectedFailedSubgraphSizes.at(failedIndex),
1238 expectedFailedInputSlots.at(failedIndex),
1239 expectedFailedOutputSlots.at(failedIndex),
1240 expectedFailedLayers.at(failedIndex));
1241 }
1242
1243 // -----------------------------
1244 // Check the untouched subgraphs
1245 // -----------------------------
1246
Sadik Armagan1625efc2021-06-10 18:24:34 +01001247 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001248}
1249
// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name).
// Expects no substitutions and no failures: the whole input subgraph must come
// back as a single untouched subgraph.
void FullyUnoptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a fully unoptimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // 3 layers: weights + bias + conv
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 3);

    CHECK(Contains(layersInGraph, "conv layer unoptimizable"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ============================================================================
    // The expected results are:
    //  - No substitutions
    //  - No failed subgraphs
    //  - Exactly one untouched subgraph, corresponding to the whole input subgraph
    // ============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    // The untouched subgraph must have exactly the same shape as the input subgraph
    CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
                           {subgraphInputSlots.size(),
                            subgraphOutputSlots.size(), subgraphLayers.size()},
                           subgraphInputSlots, subgraphOutputSlots,
                           subgraphLayers);
}
1313
1314// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1315void PartiallyOptimizableSubgraphTestImpl1()
1316{
1317 Graph graph;
1318 LayerNameToLayerMap layersInGraph;
1319
1320 // Create a fully optimizable subgraph
1321 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001322 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001323
Francis Murtagh56ccf682021-12-13 18:48:12 +00001324 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1325 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1326 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001327
Sadik Armagan1625efc2021-06-10 18:24:34 +01001328 CHECK(subgraphInputSlots.size() == 1);
1329 CHECK(subgraphOutputSlots.size() == 1);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001330 CHECK(subgraphLayers.size() == 15);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001331
Sadik Armagan1625efc2021-06-10 18:24:34 +01001332 CHECK(Contains(layersInGraph, "conv1 layer"));
1333 CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1334 CHECK(Contains(layersInGraph, "conv3 layer"));
1335 CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1336 CHECK(Contains(layersInGraph, "conv5 layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001337
1338 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001339 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001340 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001341 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001342
1343 // Optimize the subgraph
1344 OptimizationViews optimizationViews;
1345
1346 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001347 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001348
1349 // ===============================================================================
1350 // The expected results are:
1351 // - Exactly three substitutions, corresponding to the optimizable layers
1352 // - No failed subgraphs
1353 // - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1354 // ===============================================================================
1355
1356 // -----------------------
1357 // Check the substitutions
1358 // -----------------------
1359
Rob Hughes30db8ad2019-11-08 15:50:10 +00001360 OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001361 CHECK(substitutions.size() == 3);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001362 // Sort into a consistent order
1363 std::sort(substitutions.begin(), substitutions.end(),
Francis Murtagh56ccf682021-12-13 18:48:12 +00001364 [](auto s1, auto s2)
1365 { return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1366 s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0; });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001367
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001368 std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
1369 { 1, 1, 3 },
1370 { 1, 1, 3 } };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001371 std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1372 { 1, 1, 1 },
1373 { 1, 1, 1 } };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001374 std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001375 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001376 ConvertSlotsToISlots<InputSlot, IInputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001377 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
Francis Murtagh56ccf682021-12-13 18:48:12 +00001378 ConvertSlotsToISlots<InputSlot, IInputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001379 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlot(0))}),
Francis Murtagh56ccf682021-12-13 18:48:12 +00001380 ConvertSlotsToISlots<InputSlot, IInputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001381 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlot(0))})
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001382 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001383 std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001384 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001385 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1386 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1387 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1388 ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots())),
1389 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1390 ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots()))
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001391 };
Francis Murtagh56ccf682021-12-13 18:48:12 +00001392 std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001393 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001394 { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
1395 { layersInGraph.at("Weights Layer 3"), layersInGraph.at("Bias Layer 3"), layersInGraph.at("conv3 layer") },
1396 { layersInGraph.at("Weights Layer 5"), layersInGraph.at("Bias Layer 5"), layersInGraph.at("conv5 layer") }
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001397 };
1398
1399 for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1400 {
1401 CheckSubstitution(substitutions.at(substitutionIndex),
1402 expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1403 expectedReplacementSubgraphSizes.at(substitutionIndex),
1404 expectedSubstitutableInputSlots.at(substitutionIndex),
1405 expectedSubstitutableOutputSlots.at(substitutionIndex),
1406 expectedSubstitutableLayers.at(substitutionIndex));
1407 }
1408
1409 // --------------------------
1410 // Check the failed subgraphs
1411 // --------------------------
1412
Sadik Armagan1625efc2021-06-10 18:24:34 +01001413 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001414
1415 // -----------------------------
1416 // Check the untouched subgraphs
1417 // -----------------------------
1418
Rob Hughes30db8ad2019-11-08 15:50:10 +00001419 OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001420 CHECK(untouchedSubgraphs.size() == 2);
Rob Hughes30db8ad2019-11-08 15:50:10 +00001421 // Sort into a consistent order
1422 std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001423 return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1424 s2.GetIConnectableLayers().front()->GetName()) < 0;
Rob Hughes30db8ad2019-11-08 15:50:10 +00001425 });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001426
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001427 std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 },
1428 { 1, 1, 3 } };
1429 std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots{
1430 ConvertSlotsToISlots<InputSlot,
1431 IInputSlot>({ConvertReferenceTypeToPointerType(
1432 layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))}),
1433 ConvertSlotsToISlots<InputSlot,
1434 IInputSlot>({ConvertReferenceTypeToPointerType(
1435 layersInGraph.at("conv4 layer unoptimizable")->GetInputSlot(0))})};
1436
Francis Murtagh56ccf682021-12-13 18:48:12 +00001437 std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001438 {
Francis Murtagh56ccf682021-12-13 18:48:12 +00001439 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001440 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())),
Francis Murtagh56ccf682021-12-13 18:48:12 +00001441 ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001442 ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots()))
1443 };
1444
Francis Murtagh56ccf682021-12-13 18:48:12 +00001445 std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001446 {
1447 { layersInGraph.at("Weights Layer 2 unoptimizable"),
1448 layersInGraph.at("Bias Layer 2 unoptimizable"),
1449 layersInGraph.at("conv2 layer unoptimizable") },
1450 { layersInGraph.at("Weights Layer 4 unoptimizable"),
1451 layersInGraph.at("Bias Layer 4 unoptimizable"),
1452 layersInGraph.at("conv4 layer unoptimizable") }
1453 };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001454
1455 for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1456 {
1457 CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1458 expectedUntouchedSubgraphSizes.at(untouchedIndex),
1459 expectedUntouchedInputSlots.at(untouchedIndex),
1460 expectedUntouchedOutputSlots.at(untouchedIndex),
1461 expectedUntouchedLayers.at(untouchedIndex));
1462 }
1463}
1464
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers (the substitutable
// subgraph has two inputs, one per optimizable conv branch, merged by an add layer).
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Two inputs: one per conv branch feeding the add layer
    CHECK(subgraphInputSlots.size() == 2);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 10);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    //  - Exactly one substitution, corresponding to the optimizable layers
    //  - No failed subgraphs
    //  - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    // ==============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 1);

    // 7 substitutable layers (2 weights + 2 biases + 2 convs + add) replaced by a single layer
    ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 7 };
    ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };

    SubgraphView::IInputSlots expectedSubstitutableInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0])})[0],
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])})[0]
    };

    SubgraphView::IOutputSlots expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()))
    };

    SubgraphView::IConnectableLayers expectedSubstitutableLayers
    {
        layersInGraph.at("Weights Layer 1"),
        layersInGraph.at("Weights Layer 3"),
        layersInGraph.at("Bias Layer 1"),
        layersInGraph.at("Bias Layer 3"),
        layersInGraph.at("conv1 layer"),
        layersInGraph.at("conv3 layer"),
        layersInGraph.at("add layer")
    };

    CheckSubstitution(substitutions[0],
                      expectedSubstitutableSubgraphSizes,
                      expectedReplacementSubgraphSizes,
                      expectedSubstitutableInputSlots,
                      expectedSubstitutableOutputSlots,
                      expectedSubstitutableLayers);

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    // The single untouched subgraph holds the unoptimizable conv2 chain (weights + bias + conv)
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 } };
    std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
    {
        ConvertSlotsToISlots<InputSlot,
                             IInputSlot>({ConvertReferenceTypeToPointerType(
                                 layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))})};
    std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable"), layersInGraph.at("Weights Layer 2 unoptimizable"),
          layersInGraph.at("Bias Layer 2 unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1588
1589} // Anonymous namespace
1590
// doctest suite: each test case simply dispatches to the corresponding
// implementation function defined in the anonymous namespace above.
TEST_SUITE("OptimizeSubGraph")
{
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}