blob: 45fcf19f90caf3ff692b0bd92e0ef3ce980761bd [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

Sadik Armagana097d2a2021-11-24 15:47:28 +00006#include <CommonTestUtils.hpp>
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01007#include "MockBackendId.hpp"
8
9#include <Graph.hpp>
10#include <Network.hpp>
11
Matteo Martincighc601aa62019-10-29 15:03:22 +000012#include <armnn/BackendRegistry.hpp>
Cathal Corbett3464ba12022-03-04 11:36:39 +000013#include <armnnTestUtils/MockBackend.hpp>
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010014
Sadik Armagan1625efc2021-06-10 18:24:34 +010015#include <doctest/doctest.h>
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010016#include <unordered_map>
17
18using namespace armnn;
19
20namespace
21{
22
// Expected counts of layers and boundary (input/output) slots for a subgraph
// after a test has run
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  {0};
    size_t m_NumOutputSlots {0};
    size_t m_NumLayers      {0};
};
30
31// Keep the layers organized by layer name
32using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;
33
// Graphs store input/output slots by reference while subgraphs store them as
// pointers; this helper converts a slot reference to the equivalent
// (non-const) pointer.
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    return &const_cast<SlotType&>(input);
}
41
// Array variant of the reference-to-pointer conversion: converts every slot
// reference in the given vector to a pointer, preserving the ordering.
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    output.reserve(input.size());
    for (const SlotType& item : input)
    {
        output.push_back(const_cast<SlotType*>(&item));
    }
    return output;
}
58
Francis Murtagh56ccf682021-12-13 18:48:12 +000059// Convert from vector of Slots* (Input/Output) to vector of ISlots* (IInput/IOutput)
60template <typename SlotType, typename ResultSlotType>
61std::vector<ResultSlotType*> ConvertSlotsToISlots(const std::vector<SlotType*> input)
62{
63 std::vector<ResultSlotType*> output;
64 for (auto slot : input)
65 {
66 output.push_back(PolymorphicDowncast<ResultSlotType*>(slot));
67 }
68 return output;
69}
70
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010071// Convenience function to add an input layer to a graph
72Layer* AddInputLayer(Graph& graph,
73 const std::string& layerName,
74 const TensorInfo& inputInfo,
75 LayerBindingId inputId = 0)
76{
77 Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +010078 CHECK(inputLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010079 inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
80 return inputLayer;
81}
82
83// Convenience function to add an output layer to a graph
84Layer* AddOutputLayer(Graph& graph,
85 const std::string& layerName)
86{
87 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +010088 CHECK(outputLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +010089 return outputLayer;
90}
91
92// Convenience function to add a convolution layer to a graph
93Convolution2dLayer* AddConvolutionLayer(Graph& graph,
94 LayerNameToLayerMap& layersInGraph,
95 const Convolution2dDescriptor& convolutionDescriptor,
96 const std::string& layerName,
97 const TensorInfo& weightInfo,
98 const TensorInfo& biasInfo,
99 const TensorInfo& outputInfo)
100{
101 Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100102 CHECK(convLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100103 SetWeightAndBias(convLayer, weightInfo, biasInfo);
104 convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
105 layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
106 return convLayer;
107}
108
Keith Davis2cddc722022-04-07 11:32:00 +0100109// Convenience function to add a constant layer to a graph
110ConstantLayer* AddConstantLayer(Graph& graph,
111 LayerNameToLayerMap& layersInGraph,
112 const std::string& layerName,
113 const ConstTensor& constTensor,
114 const TensorInfo& outputInfo)
115{
116 ConstantLayer* const constantLayer = graph.AddLayer<ConstantLayer>(layerName.c_str());
117 CHECK(constantLayer);
118 constantLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
119 constantLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
120 layersInGraph.insert(std::make_pair(constantLayer->GetName(), constantLayer));
121 return constantLayer;
122}
123
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100124// Convenience function to add a pooling layer to a graph
125Pooling2dLayer* AddPoolingLayer(Graph& graph,
126 LayerNameToLayerMap& layersInGraph,
127 const Pooling2dDescriptor& poolingDescriptor,
128 const std::string& layerName,
129 const TensorInfo& outputInfo)
130{
131 Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100132 CHECK(poolingLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100133 poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
134 layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
135 return poolingLayer;
136}
137
138// Convenience function to add an addition layer to a graph
139AdditionLayer* AddAdditionaLayer(Graph& graph,
140 LayerNameToLayerMap& layersInGraph,
141 const std::string& layerName,
142 const TensorInfo& outputInfo)
143{
144 AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100145 CHECK(additionLayer);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100146 additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
147 layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
148 return additionLayer;
149}
150
151// Convenience function to check that the given substitution matches the specified expected values
152void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
153 const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
154 const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000155 const SubgraphView::IInputSlots& expectedSubstitutableInputSlots,
156 const SubgraphView::IOutputSlots& expectedSubstitutableOutputSlots,
157 const SubgraphView::IConnectableLayers& expectedSubstitutableLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100158{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000159 const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
160 const SubgraphView::IInputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetIInputSlots();
161 const SubgraphView::IOutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetIOutputSlots();
162 const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
163 substitutableSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100164
Francis Murtagh56ccf682021-12-13 18:48:12 +0000165 const SubgraphView& replacementSubgraph = substitution.m_ReplacementSubgraph;
166 const SubgraphView::IInputSlots& replacementSubgraphInputSlots = replacementSubgraph.GetIInputSlots();
167 const SubgraphView::IOutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetIOutputSlots();
168 const SubgraphView::IConnectableLayers& replacementSubgraphLayers = replacementSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100169
Sadik Armagan1625efc2021-06-10 18:24:34 +0100170 CHECK(substitutableSubgraphInputSlots.size() == expectedSubstitutableSubgraphSize.m_NumInputSlots);
171 CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
172 CHECK(substitutableSubgraphLayers.size() == expectedSubstitutableSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100173
Sadik Armagan1625efc2021-06-10 18:24:34 +0100174 CHECK(AreEqual(substitutableSubgraphInputSlots, expectedSubstitutableInputSlots));
175 CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
176 CHECK(AreEqual(substitutableSubgraphLayers, expectedSubstitutableLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100177
Sadik Armagan1625efc2021-06-10 18:24:34 +0100178 CHECK(replacementSubgraphInputSlots.size() == expectedReplacementSubgraphSize.m_NumInputSlots);
179 CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
180 CHECK(replacementSubgraphLayers.size() == expectedReplacementSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100181
Sadik Armagan1625efc2021-06-10 18:24:34 +0100182 CHECK(!AreEqual(replacementSubgraphInputSlots, expectedSubstitutableInputSlots));
183 CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
184 CHECK(!AreEqual(replacementSubgraphLayers, expectedSubstitutableLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100185
Sadik Armagan1625efc2021-06-10 18:24:34 +0100186 CHECK(std::all_of(replacementSubgraphLayers.begin(),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100187 replacementSubgraphLayers.end(),
Francis Murtagh56ccf682021-12-13 18:48:12 +0000188 [](const IConnectableLayer* layer)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100189 {
190 return layer->GetType() == LayerType::PreCompiled;
191 }));
192}
193
194// Convenience function to check that the given failed subgraph matches the specified expected values
195void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
196 const ExpectedSubgraphSize& expectedFailedSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000197 const SubgraphView::IInputSlots& expectedFailedInputSlots,
198 const SubgraphView::IOutputSlots& expectedFailedOutputSlots,
199 const SubgraphView::IConnectableLayers& expectedFailedLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100200{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000201 const SubgraphView::IInputSlots& failedSubgraphInputSlots = failedSubgraph.GetIInputSlots();
202 const SubgraphView::IOutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetIOutputSlots();
203 const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100204
Sadik Armagan1625efc2021-06-10 18:24:34 +0100205 CHECK(failedSubgraphInputSlots.size() == expectedFailedSubgraphSize.m_NumInputSlots);
206 CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
207 CHECK(failedSubgraphLayers.size() == expectedFailedSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100208
Sadik Armagan1625efc2021-06-10 18:24:34 +0100209 CHECK(AreEqual(failedSubgraphInputSlots, expectedFailedInputSlots));
210 CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
211 CHECK(AreEqual(failedSubgraphLayers, expectedFailedLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100212}
213
214// Convenience function to check that the given untouched subgraph matches the specified expected values
215void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
216 const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
Francis Murtagh56ccf682021-12-13 18:48:12 +0000217 const SubgraphView::IInputSlots& expectedUntouchedInputSlots,
218 const SubgraphView::IOutputSlots& expectedUntouchedOutputSlots,
219 const SubgraphView::IConnectableLayers& expectedUntouchedLayers)
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100220{
Francis Murtagh56ccf682021-12-13 18:48:12 +0000221 const SubgraphView::IInputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetIInputSlots();
222 const SubgraphView::IOutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetIOutputSlots();
223 const SubgraphView::IConnectableLayers& untouchedSubgraphLayers = untouchedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100224
Sadik Armagan1625efc2021-06-10 18:24:34 +0100225 CHECK(untouchedSubgraphInputSlots.size() == expectedUntouchedSubgraphSize.m_NumInputSlots);
226 CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
227 CHECK(untouchedSubgraphLayers.size() == expectedUntouchedSubgraphSize.m_NumLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100228
Sadik Armagan1625efc2021-06-10 18:24:34 +0100229 CHECK(AreEqual(untouchedSubgraphInputSlots, expectedUntouchedInputSlots));
230 CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
231 CHECK(AreEqual(untouchedSubgraphLayers, expectedUntouchedLayers));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100232}
233
234// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
235SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
236{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000237 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
238 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100239
240 Pooling2dDescriptor poolingDescriptor;
241 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
242 poolingDescriptor.m_PoolWidth = 2;
243 poolingDescriptor.m_PoolHeight = 2;
244 poolingDescriptor.m_StrideX = 2;
245 poolingDescriptor.m_StrideY = 2;
246 poolingDescriptor.m_PadLeft = 1;
247 poolingDescriptor.m_PadRight = 1;
248 poolingDescriptor.m_PadTop = 1;
249 poolingDescriptor.m_PadBottom = 1;
250 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
251 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
252
253 // Construct the graph
254 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
255 Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
256 "pooling layer", outputInfo);
257 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
258
259 // Connect the network
260 inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
261 poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
262
263 // Create the subgraph view for the whole network
Keith Davis2cddc722022-04-07 11:32:00 +0100264 return CreateSubgraphViewFrom(CreateInputsFrom(poolingLayer),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100265 CreateOutputsFrom({poolingLayer}),
266 {poolingLayer});
267}
268
269// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
270SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
271{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000272 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
273 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100274
275 Pooling2dDescriptor poolingDescriptor;
276 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
277 poolingDescriptor.m_PoolWidth = 2;
278 poolingDescriptor.m_PoolHeight = 2;
279 poolingDescriptor.m_StrideX = 2;
280 poolingDescriptor.m_StrideY = 2;
281 poolingDescriptor.m_PadLeft = 1;
282 poolingDescriptor.m_PadRight = 1;
283 poolingDescriptor.m_PadTop = 1;
284 poolingDescriptor.m_PadBottom = 1;
285 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
286 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
287
288 // Construct the graph
289 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
290 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
291 "pooling1 layer", outputInfo);
292 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
293 "pooling2 layer", outputInfo);
294 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
295 "pooling3 layer", outputInfo);
296 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
297
298 // Connect the network
299 inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
300 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
301 pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
302 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
303
304 // Create the subgraph view for the whole network
Keith Davis2cddc722022-04-07 11:32:00 +0100305 return CreateSubgraphViewFrom(CreateInputsFrom(pooling1Layer),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100306 CreateOutputsFrom({pooling3Layer}),
307 {pooling1Layer,
308 pooling2Layer,
309 pooling3Layer});
310}
311
312// Creates a simple subgraph with only one convolution layer, supported by the mock backend
313SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
314{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000315 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
316 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davis2cddc722022-04-07 11:32:00 +0100317 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
318 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
319
320 weightInfo.SetConstant(true);
321 biasInfo.SetConstant(true);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100322
323 Convolution2dDescriptor convolutionDescriptor;
324 convolutionDescriptor.m_StrideX = 1;
325 convolutionDescriptor.m_StrideY = 1;
326 convolutionDescriptor.m_BiasEnabled = true;
327 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
328
Keith Davis2cddc722022-04-07 11:32:00 +0100329 std::vector<float> weightsVector(64);
330 ConstTensor constWeightsTensor(weightInfo, weightsVector);
331
332 std::vector<float> biasVector(16);
333 ConstTensor constBiasTensor(biasInfo, biasVector);
334
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100335 // Construct the graph
336 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
337 Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
338 "conv layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100339
340 ConstantLayer* const weightsLayer =
341 AddConstantLayer(graph, layersInGraph, "Weights Layer", constWeightsTensor, outputInfo);
342 ConstantLayer* const biasLayer = AddConstantLayer(graph, layersInGraph, "Bias Layer", constBiasTensor, outputInfo);
343
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100344 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
345
346 // Connect the network
347 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
Keith Davis2cddc722022-04-07 11:32:00 +0100348 weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
349 biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100350 convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
351
Keith Davis2cddc722022-04-07 11:32:00 +0100352 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100353 // Create the subgraph view for the whole network
Keith Davis2cddc722022-04-07 11:32:00 +0100354 return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100355 CreateOutputsFrom({convLayer}),
Keith Davis2cddc722022-04-07 11:32:00 +0100356 {convLayer, weightsLayer, biasLayer});
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100357}
358
359// Creates a subgraph with five convolutions layers, all supported by the mock backend
360SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
361{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000362 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
363 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davis2cddc722022-04-07 11:32:00 +0100364 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
365 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
366
367 weightInfo.SetConstant(true);
368 biasInfo.SetConstant(true);
369
370 std::vector<float> weightsVector(64);
371 ConstTensor constWeightsTensor(weightInfo, weightsVector);
372
373 std::vector<float> biasVector(16);
374 ConstTensor constBiasTensor(biasInfo, biasVector);
375
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100376
377 Convolution2dDescriptor convolutionDescriptor;
378 convolutionDescriptor.m_StrideX = 1;
379 convolutionDescriptor.m_StrideY = 1;
380 convolutionDescriptor.m_BiasEnabled = true;
381 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
382
383 // Construct the graph
384 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
385 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
386 "conv1 layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100387 ConstantLayer* const weightsLayer1 =
388 AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
389 ConstantLayer* const biasLayer1 =
390 AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
391
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100392 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
393 "conv2 layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100394 ConstantLayer* const weightsLayer2 =
395 AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, outputInfo);
396 ConstantLayer* const biasLayer2 =
397 AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, outputInfo);
398
399
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100400 Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
401 "conv3 layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100402 ConstantLayer* const weightsLayer3 =
403 AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
404 ConstantLayer* const biasLayer3 =
405 AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);
406
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100407 Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
408 "conv4 layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100409 ConstantLayer* const weightsLayer4 =
410 AddConstantLayer(graph, layersInGraph, "Weights Layer 4", constWeightsTensor, outputInfo);
411 ConstantLayer* const biasLayer4 =
412 AddConstantLayer(graph, layersInGraph, "Bias Layer 4", constBiasTensor, outputInfo);
413
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100414 Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
415 "conv5 layer", weightInfo, biasInfo, outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100416 ConstantLayer* const weightsLayer5 =
417 AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, outputInfo);
418 ConstantLayer* const biasLayer5 =
419 AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, outputInfo);
420
421
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100422 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
423
424 // Connect the network
425 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
Keith Davis2cddc722022-04-07 11:32:00 +0100426 weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
427 biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100428
Keith Davis2cddc722022-04-07 11:32:00 +0100429 conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
430 weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
431 biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
432
433 conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
434 weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
435 biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
436
437 conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
438 weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
439 biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));
440
441 conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
442 weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
443 biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));
444
445 conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
446 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100447 // Create the subgraph view for the whole network
Keith Davis2cddc722022-04-07 11:32:00 +0100448 return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
449 CreateOutputsFrom({ conv5Layer }),
450 { weightsLayer1,
451 biasLayer1,
452 conv1Layer,
453 weightsLayer2,
454 biasLayer2,
455 conv2Layer,
456 weightsLayer3,
457 biasLayer3,
458 conv3Layer,
459 weightsLayer4,
460 biasLayer4,
461 conv4Layer,
462 weightsLayer5,
463 biasLayer5,
464 conv5Layer });
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100465}
466
467// Creates a subgraph with both supported and unsupported layers
468// (only convolutions are unsupported by the mock backend)
469SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
470{
Derek Lambertif90c56d2020-01-10 17:14:08 +0000471 const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
472 const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Keith Davis2cddc722022-04-07 11:32:00 +0100473 TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
474 TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
475
476 weightInfo.SetConstant(true);
477 biasInfo.SetConstant(true);
478
479 std::vector<float> weightsVector(64);
480 ConstTensor constWeightsTensor(weightInfo, weightsVector);
481
482 std::vector<float> biasVector(16);
483 ConstTensor constBiasTensor(biasInfo, biasVector);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100484
485 Convolution2dDescriptor convolutionDescriptor;
486 convolutionDescriptor.m_StrideX = 1;
487 convolutionDescriptor.m_StrideY = 1;
488 convolutionDescriptor.m_BiasEnabled = true;
489 convolutionDescriptor.m_DataLayout = DataLayout::NHWC;
490
491 Pooling2dDescriptor poolingDescriptor;
492 poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
493 poolingDescriptor.m_PoolWidth = 2;
494 poolingDescriptor.m_PoolHeight = 2;
495 poolingDescriptor.m_StrideX = 2;
496 poolingDescriptor.m_StrideY = 2;
497 poolingDescriptor.m_PadLeft = 1;
498 poolingDescriptor.m_PadRight = 1;
499 poolingDescriptor.m_PadTop = 1;
500 poolingDescriptor.m_PadBottom = 1;
501 poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
502 poolingDescriptor.m_DataLayout = DataLayout::NHWC;
503
504 // Construct the graph
505 Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100506 ConstantLayer* const weightsLayer1 =
507 AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
508
509 ConstantLayer* const biasLayer1 =
510 AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
511
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100512 Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
513 "conv1 layer", weightInfo, biasInfo, outputInfo);
514 Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
515 "pooling1 layer", outputInfo);
516 Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
517 "pooling2 layer", outputInfo);
Keith Davis2cddc722022-04-07 11:32:00 +0100518
519 ConstantLayer* const weightsLayer2 =
520 AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, outputInfo);
521
522 ConstantLayer* const biasLayer2 =
523 AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, outputInfo);
524
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100525 Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
526 "conv2 layer", weightInfo, biasInfo, outputInfo);
527 Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
528 "pooling3 layer", outputInfo);
529 Layer* const outputLayer = AddOutputLayer(graph, "output layer");
530
531 // Connect the network
532 inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
Keith Davis2cddc722022-04-07 11:32:00 +0100533 weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
534 biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100535 conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
536 pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
537 pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
Keith Davis2cddc722022-04-07 11:32:00 +0100538 weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
539 biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100540 conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
541 pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
542
Keith Davis2cddc722022-04-07 11:32:00 +0100543 std::vector<unsigned int> ignoreSlots = {1, 2};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100544 // Create the subgraph view for the whole network
Keith Davis2cddc722022-04-07 11:32:00 +0100545 return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100546 CreateOutputsFrom({pooling3Layer}),
Keith Davis2cddc722022-04-07 11:32:00 +0100547 {weightsLayer1,
548 biasLayer1,
549 conv1Layer,
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100550 pooling1Layer,
551 pooling2Layer,
Keith Davis2cddc722022-04-07 11:32:00 +0100552 weightsLayer2,
553 biasLayer2,
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100554 conv2Layer,
555 pooling3Layer});
556}
557
// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Graph topology built below:
    //
    //   input ------------------> conv (input slot 0)
    //   constant weights -------> conv (input slot 1)
    //   constant bias ----------> conv (input slot 2)
    //   conv -------------------> output
    //
    // Every subgraph layer carries "unoptimizable" in its name; presumably the mock
    // backend keys off the name to refuse optimization — confirm against MockBackend.
    //
    // graph         : graph the layers are added to (via the Add* helpers defined earlier in this file)
    // layersInGraph : name -> layer map, populated by the Add* helpers
    // Returns a SubgraphView covering the conv layer plus its constant weights/bias inputs.

    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights/bias must be flagged constant so they can back ConstTensor instances.
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // NOTE(review): 64 floats = 256 bytes backing a 256-element QAsymmU8 tensor
    // ({16,1,1,16}); the sizes agree only by byte count — confirm this is intended.
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    // NOTE(review): the constant layers are registered with outputInfo ({1,16,16,16})
    // rather than weightInfo/biasInfo — verify this is intentional for these tests.
    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer unoptimizable", constWeightsTensor, outputInfo);

    ConstantLayer* const biasLayer =
        AddConstantLayer(graph, layersInGraph, "Bias Layer unoptimizable", constBiasTensor, outputInfo);

    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer unoptimizable", weightInfo, biasInfo,
                                                              outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Slots 1 and 2 (weights/bias) are excluded when collecting the view's input
    // slots: their source constant layers are inside the view — see CreateInputsFrom.
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}
606
// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Graph topology: a linear chain of five convolutions, each fed by its own
    // constant weights/bias layers on input slots 1 and 2:
    //
    //   input -> conv1 -> conv2 -> conv3 -> conv4 -> conv5 -> output
    //
    // conv2 and conv4 (and their constants) are named "unoptimizable", alternating
    // optimizable and unoptimizable sections so the caller can exercise partial
    // optimization of a view.
    //
    // graph         : graph the layers are added to (via the Add* helpers defined earlier in this file)
    // layersInGraph : name -> layer map, populated by the Add* helpers
    // Returns a SubgraphView covering all five convolutions and their constant inputs.

    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights/bias must be flagged constant so they can back ConstTensor instances.
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    // The same constant tensors back every weights/bias layer below.
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    // NOTE(review): constant layers are registered with outputInfo rather than
    // weightInfo/biasInfo — verify this is intentional for these tests.
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph,
                                                               layersInGraph,
                                                               convolutionDescriptor,
                                                               "conv2 layer unoptimizable",
                                                               weightInfo,
                                                               biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph,
                                                               layersInGraph,
                                                               convolutionDescriptor,
                                                               "conv4 layer unoptimizable",
                                                               weightInfo,
                                                               biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", weightInfo, biasInfo, outputInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network: each conv takes data on slot 0 and its constants on slots 1/2.
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Slots 1 and 2 (weights/bias) are excluded when collecting the view's input
    // slots: their source constant layers are inside the view — see CreateInputsFrom.
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({conv5Layer}),
                                  {weightsLayer1,
                                   biasLayer1,
                                   conv1Layer,
                                   weightsLayer2,
                                   biasLayer2,
                                   conv2Layer,
                                   weightsLayer3,
                                   biasLayer3,
                                   conv3Layer,
                                   weightsLayer4,
                                   biasLayer4,
                                   conv4Layer,
                                   weightsLayer5,
                                   biasLayer5,
                                   conv5Layer});
}
720
// Creates a subgraph with some input unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    // Graph topology: two independent branches joined by an addition —
    //
    //   input1 -> conv1 -----------------------> add (slot 0)
    //   input2 -> conv2(unoptimizable) -> conv3 -> add (slot 1)
    //   add -> output
    //
    // Each conv takes constant weights/bias on input slots 1/2. The view's input
    // slots therefore come from two different layers (conv1 and conv2), which is
    // the case this builder exists to exercise.
    //
    // graph         : graph the layers are added to (via the Add* helpers defined earlier in this file)
    // layersInGraph : name -> layer map, populated by the Add* helpers
    // Returns a SubgraphView covering both branches and the addition layer.

    const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    // Weights/bias must be flagged constant so they can back ConstTensor instances.
    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX = 1;
    convolutionDescriptor.m_StrideY = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout = DataLayout::NHWC;

    // Construct the graph: two graph inputs, bound to slots 0 and 1.
    Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
    Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);

    // NOTE(review): constant layers are registered with outputInfo rather than
    // weightInfo/biasInfo — verify this is intentional for these tests.
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, outputInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, outputInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", weightInfo, biasInfo, outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", weightInfo, biasInfo,
                                                               outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", weightInfo, biasInfo, outputInfo);
    AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network — branch 1: input1 -> conv1 -> add slot 0
    input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
    conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));

    // Branch 2: input2 -> conv2 -> conv3 -> add slot 1
    input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
    conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));

    addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network.
    // Slots 1 and 2 (weights/bias) are excluded when collecting the view's input
    // slots: their source constant layers are inside the view — see CreateInputsFrom.
    std::vector<unsigned int> ignoreSlots = {1, 2};
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
                                                    conv2Layer}, ignoreSlots),
                                  CreateOutputsFrom({addLayer}),
                                  { weightsLayer1,
                                    biasLayer1,
                                    weightsLayer2,
                                    biasLayer2,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv1Layer,
                                    conv2Layer,
                                    conv3Layer,
                                    addLayer });
}
804
805// The input subgraph contains only a single unsupported layer (only convolutions are unsupported by the mock backend)
806void FullyUnsupporteSubgraphTestImpl1()
807{
808 Graph graph;
809 LayerNameToLayerMap layersInGraph;
810
811 // Create an unsupported subgraph
812 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100813 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100814
Francis Murtagh56ccf682021-12-13 18:48:12 +0000815 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
816 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
817 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100818
Sadik Armagan1625efc2021-06-10 18:24:34 +0100819 CHECK(subgraphInputSlots.size() == 1);
820 CHECK(subgraphOutputSlots.size() == 1);
821 CHECK(subgraphLayers.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100822
Sadik Armagan1625efc2021-06-10 18:24:34 +0100823 CHECK(Contains(layersInGraph, "pooling layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100824
825 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000826 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100827 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100828 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100829
830 // Optimize the subgraph
831 OptimizationViews optimizationViews;
832
833 // Check that the optimization is carried out correctly, but no optimization is performed
Sadik Armagan1625efc2021-06-10 18:24:34 +0100834 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100835
836 // =======================================================================
837 // The expected results are:
838 // - No substitutions
839 // - Exactly one failed subgraph, corresponding to the whole original one
840 // - No untouched subgraphs
841 // =======================================================================
842
843 // -----------------------
844 // Check the substitutions
845 // -----------------------
846
Sadik Armagan1625efc2021-06-10 18:24:34 +0100847 CHECK(optimizationViews.GetSubstitutions().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100848
849 // --------------------------
850 // Check the failed subgraphs
851 // --------------------------
852
853 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100854 CHECK(failedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100855
856 CheckFailedSubgraph(failedSubgraphs.at(0),
857 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
858 subgraphInputSlots,
859 subgraphOutputSlots,
860 subgraphLayers);
861
862 // -----------------------------
863 // Check the untouched subgraphs
864 // -----------------------------
865
Sadik Armagan1625efc2021-06-10 18:24:34 +0100866 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100867}
868
869// The input subgraph contains only unsupported layers (only convolutions are unsupported by the mock backend)
870void FullyUnsupporteSubgraphTestImpl2()
871{
872 Graph graph;
873 LayerNameToLayerMap layersInGraph;
874
875 // Create an unsupported subgraph
876 SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100877 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100878
Francis Murtagh56ccf682021-12-13 18:48:12 +0000879 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
880 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
881 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100882
Sadik Armagan1625efc2021-06-10 18:24:34 +0100883 CHECK(subgraphInputSlots.size() == 1);
884 CHECK(subgraphOutputSlots.size() == 1);
885 CHECK(subgraphLayers.size() == 3);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100886
Sadik Armagan1625efc2021-06-10 18:24:34 +0100887 CHECK(Contains(layersInGraph, "pooling1 layer"));
888 CHECK(Contains(layersInGraph, "pooling2 layer"));
889 CHECK(Contains(layersInGraph, "pooling3 layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100890
891 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000892 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100893 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100894 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100895
896 // Optimize the subgraph
897 OptimizationViews optimizationViews;
898
899 // Check that the optimization is carried out correctly, but no optimization is performed
Sadik Armagan1625efc2021-06-10 18:24:34 +0100900 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100901
902 // =======================================================================
903 // The expected results are:
904 // - No substitutions
905 // - Exactly one failed subgraph, corresponding to the whole original one
906 // - No untouched subgraphs
907 // =======================================================================
908
909 // -----------------------
910 // Check the substitutions
911 // -----------------------
912
Sadik Armagan1625efc2021-06-10 18:24:34 +0100913 CHECK(optimizationViews.GetSubstitutions().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100914
915 // --------------------------
916 // Check the failed subgraphs
917 // --------------------------
918
919 const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100920 CHECK(failedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100921
Francis Murtagh56ccf682021-12-13 18:48:12 +0000922 std::list<IConnectableLayer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
Sadik Armagan1625efc2021-06-10 18:24:34 +0100923 layersInGraph.at("pooling2 layer"),
924 layersInGraph.at("pooling3 layer") };
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100925
926 const SubgraphView& failedSubgraph = failedSubgraphs.at(0);
927
928 CheckFailedSubgraph(failedSubgraph,
929 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
930 subgraphInputSlots,
931 subgraphOutputSlots,
932 subgraphLayers);
933
Francis Murtagh56ccf682021-12-13 18:48:12 +0000934 const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100935
Sadik Armagan1625efc2021-06-10 18:24:34 +0100936 CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
937 CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
938 CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100939
940 // -----------------------------
941 // Check the untouched subgraphs
942 // -----------------------------
943
Sadik Armagan1625efc2021-06-10 18:24:34 +0100944 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100945}
946
947// A simple case with only one layer (convolution) to optimize, supported by the mock backend
948void FullyOptimizableSubgraphTestImpl1()
949{
950 Graph graph;
951 LayerNameToLayerMap layersInGraph;
952
953 // Create a fully optimizable subgraph
954 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100955 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100956
Francis Murtagh56ccf682021-12-13 18:48:12 +0000957 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
958 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
959 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100960
Sadik Armagan1625efc2021-06-10 18:24:34 +0100961 CHECK(subgraphInputSlots.size() == 1);
962 CHECK(subgraphOutputSlots.size() == 1);
Keith Davis2cddc722022-04-07 11:32:00 +0100963 CHECK(subgraphLayers.size() == 3);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100964
Sadik Armagan1625efc2021-06-10 18:24:34 +0100965 CHECK(Contains(layersInGraph, "conv layer"));
Keith Davis2cddc722022-04-07 11:32:00 +0100966 CHECK(Contains(layersInGraph, "Weights Layer"));
967 CHECK(Contains(layersInGraph, "Bias Layer"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100968
969 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +0000970 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100971 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +0100972 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100973
974 // Optimize the subgraph
975 OptimizationViews optimizationViews;
976
977 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +0100978 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100979
980 // ===========================================================================================
981 // The expected results are:
982 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
983 // - No failed subgraphs
984 // - No untouched subgraphs
985 // ===========================================================================================
986
987 // -----------------------
988 // Check the substitutions
989 // -----------------------
990
991 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100992 CHECK(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +0100993
994 CheckSubstitution(substitutions.at(0),
995 { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
996 { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
997 subgraphInputSlots,
998 subgraphOutputSlots,
999 subgraphLayers);
1000
1001 // --------------------------
1002 // Check the failed subgraphs
1003 // --------------------------
1004
Sadik Armagan1625efc2021-06-10 18:24:34 +01001005 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001006
1007 // -----------------------------
1008 // Check the untouched subgraphs
1009 // -----------------------------
1010
Sadik Armagan1625efc2021-06-10 18:24:34 +01001011 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001012}
1013
1014// A case with five layers (all convolutions) to optimize, all supported by the mock backend
1015void FullyOptimizableSubgraphTestImpl2()
1016{
1017 Graph graph;
1018 LayerNameToLayerMap layersInGraph;
1019
1020 // Create a fully optimizable subgraph
1021 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001022 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001023
Francis Murtagh56ccf682021-12-13 18:48:12 +00001024 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1025 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1026 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001027
Keith Davis2cddc722022-04-07 11:32:00 +01001028 CHECK(subgraphInputSlots.size() == 1);
1029 CHECK(subgraphOutputSlots.size() == 1);
1030 CHECK(subgraphPtr->GetIConnectableLayers().size() == 15);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001031
Sadik Armagan1625efc2021-06-10 18:24:34 +01001032 CHECK(Contains(layersInGraph, "conv1 layer"));
1033 CHECK(Contains(layersInGraph, "conv2 layer"));
1034 CHECK(Contains(layersInGraph, "conv3 layer"));
1035 CHECK(Contains(layersInGraph, "conv4 layer"));
1036 CHECK(Contains(layersInGraph, "conv5 layer"));
Keith Davis2cddc722022-04-07 11:32:00 +01001037 CHECK(Contains(layersInGraph, "Weights Layer 1"));
1038 CHECK(Contains(layersInGraph, "Weights Layer 2"));
1039 CHECK(Contains(layersInGraph, "Weights Layer 3"));
1040 CHECK(Contains(layersInGraph, "Weights Layer 4"));
1041 CHECK(Contains(layersInGraph, "Weights Layer 5"));
1042 CHECK(Contains(layersInGraph, "Bias Layer 1"));
1043 CHECK(Contains(layersInGraph, "Bias Layer 2"));
1044 CHECK(Contains(layersInGraph, "Bias Layer 3"));
1045 CHECK(Contains(layersInGraph, "Bias Layer 4"));
1046 CHECK(Contains(layersInGraph, "Bias Layer 5"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001047
1048 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001049 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001050 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001051 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001052
1053 // Optimize the subgraph
1054 OptimizationViews optimizationViews;
1055
1056 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001057 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001058
1059 // ===========================================================================================
1060 // The expected results are:
1061 // - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
1062 // - No failed subgraphs
1063 // - No untouched subgraphs
1064 // ===========================================================================================
1065
1066 // -----------------------
1067 // Check the substitutions
1068 // -----------------------
1069
1070 const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001071 CHECK(substitutions.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001072
Keith Davis2cddc722022-04-07 11:32:00 +01001073 std::list<IConnectableLayer*> expectedSubstitutableLayers{
1074 layersInGraph.at("Weights Layer 1"),
1075 layersInGraph.at("Weights Layer 2"),
1076 layersInGraph.at("Weights Layer 3"),
1077 layersInGraph.at("Weights Layer 4"),
1078 layersInGraph.at("Weights Layer 5"),
1079 layersInGraph.at("Bias Layer 1"),
1080 layersInGraph.at("Bias Layer 2"),
1081 layersInGraph.at("Bias Layer 3"),
1082 layersInGraph.at("Bias Layer 4"),
1083 layersInGraph.at("Bias Layer 5"),
1084 layersInGraph.at("conv1 layer"),
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001085 layersInGraph.at("conv2 layer"),
1086 layersInGraph.at("conv3 layer"),
1087 layersInGraph.at("conv4 layer"),
Keith Davis2cddc722022-04-07 11:32:00 +01001088 layersInGraph.at("conv5 layer")};
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001089
1090 const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
1091
Keith Davis2cddc722022-04-07 11:32:00 +01001092 CheckSubstitution(
1093 substitution,
1094 {subgraphInputSlots.size(), subgraphOutputSlots.size(),
1095 subgraphLayers.size()},
1096 {subgraphInputSlots.size(), subgraphOutputSlots.size(), 1},
1097 subgraphInputSlots, subgraphOutputSlots, expectedSubstitutableLayers);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001098
Francis Murtagh56ccf682021-12-13 18:48:12 +00001099 const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
1100 substitution.m_SubstitutableSubgraph.GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001101
Sadik Armagan1625efc2021-06-10 18:24:34 +01001102 CHECK_EQ(substitutableSubgraphLayers.front() + 0, expectedSubstitutableLayers.front() + 0);
1103 CHECK_EQ(substitutableSubgraphLayers.front() + 1, expectedSubstitutableLayers.front() + 1);
1104 CHECK_EQ(substitutableSubgraphLayers.front() + 2, expectedSubstitutableLayers.front() + 2);
1105 CHECK_EQ(substitutableSubgraphLayers.front() + 3, expectedSubstitutableLayers.front() + 3);
1106 CHECK_EQ(substitutableSubgraphLayers.front() + 4, expectedSubstitutableLayers.front() + 4);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001107
1108 // --------------------------
1109 // Check the failed subgraphs
1110 // --------------------------
1111
Sadik Armagan1625efc2021-06-10 18:24:34 +01001112 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001113
1114 // -----------------------------
1115 // Check the untouched subgraphs
1116 // -----------------------------
1117
Sadik Armagan1625efc2021-06-10 18:24:34 +01001118 CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001119}
1120
// The input subgraph contains both supported and unsupported layers
// (but only convolutions are unsupported by the mock backend)
void PartiallySupportedSubgraphTestImpl()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially supported subgraph: two (weights, bias, conv) groups
    // separated and followed by pooling layers (pooling is unsupported by the mock backend)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Sanity-check the subgraph built by the helper before optimizing it
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 9);

    CHECK(Contains(layersInGraph, "Weights Layer 1"));
    CHECK(Contains(layersInGraph, "Bias Layer 1"));
    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "Weights Layer 2"));
    CHECK(Contains(layersInGraph, "Bias Layer 2"));
    CHECK(Contains(layersInGraph, "conv2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ========================================================================
    // The expected results are:
    //  - Exactly two substitutions, corresponding to the supported layers
    //  - Exactly two failed subgraphs, corresponding to the unsupported layers
    //  - No untouched subgraphs
    // ========================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 2);
    // Sort into a consistent order (by first layer name) since the backend does not
    // guarantee the order in which substitutions are reported
    std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
        return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
                      s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0;
    });

    // Each substitutable subgraph is a (weights, bias, conv) triple; each is replaced
    // by a single pre-compiled layer, hence the { 1, 1, 1 } replacement sizes
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
                                                                          { 1, 1, 3 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlot(0))})
    };

    std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
    {
        { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
        { layersInGraph.at("Weights Layer 2"), layersInGraph.at("Bias Layer 2"), layersInGraph.at("conv2 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 2);
    // Sort into a consistent order (by first layer name), as above
    std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetIConnectableLayers().front()->GetName(),
                      s2.GetIConnectableLayers().front()->GetName()) < 0;
    });

    // First failed subgraph: pooling1 + pooling2 (adjacent unsupported layers are grouped);
    // second failed subgraph: pooling3 on its own
    std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
                                                                   { 1, 1, 1 } };
    std::vector<SubgraphView::IInputSlots> expectedFailedInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots())),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots()))
    };
    std::vector<SubgraphView::IOutputSlots> expectedFailedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedFailedLayers
    {
        { layersInGraph.at("pooling1 layer"),
          layersInGraph.at("pooling2 layer") },
        { layersInGraph.at("pooling3 layer") }
    };

    for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
    {
        CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
                            expectedFailedSubgraphSizes.at(failedIndex),
                            expectedFailedInputSlots.at(failedIndex),
                            expectedFailedOutputSlots.at(failedIndex),
                            expectedFailedLayers.at(failedIndex));
    }

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}
1265
1266// The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
1267void FullyUnoptimizableSubgraphTestImpl1()
1268{
1269 Graph graph;
1270 LayerNameToLayerMap layersInGraph;
1271
1272 // Create a fully optimizable subgraph
1273 SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
Sadik Armagan1625efc2021-06-10 18:24:34 +01001274 CHECK((subgraphPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001275
Francis Murtagh56ccf682021-12-13 18:48:12 +00001276 const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1277 const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1278 const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001279
Sadik Armagan1625efc2021-06-10 18:24:34 +01001280 CHECK(subgraphInputSlots.size() == 1);
1281 CHECK(subgraphOutputSlots.size() == 1);
Keith Davis2cddc722022-04-07 11:32:00 +01001282 CHECK(subgraphLayers.size() == 3);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001283
Sadik Armagan1625efc2021-06-10 18:24:34 +01001284 CHECK(Contains(layersInGraph, "conv layer unoptimizable"));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001285
1286 // Create a mock backend object
David Monahanc1536d62020-02-12 15:52:35 +00001287 MockBackendInitialiser initialiser; // Register the Mock Backend
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001288 auto backendObjPtr = CreateBackendObject(MockBackendId());
Sadik Armagan1625efc2021-06-10 18:24:34 +01001289 CHECK((backendObjPtr != nullptr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001290
1291 // Optimize the subgraph
1292 OptimizationViews optimizationViews;
1293
1294 // Check that the optimization is carried out correctly
Sadik Armagan1625efc2021-06-10 18:24:34 +01001295 CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001296
1297 // ============================================================================
1298 // The expected results are:
1299 // - No substitutions
1300 // - No failed subgraphs
1301 // - Exactly one untouched subgraph, corresponding to the whole input subgraph
1302 // ============================================================================
1303
1304 // -----------------------
1305 // Check the substitutions
1306 // -----------------------
1307
Sadik Armagan1625efc2021-06-10 18:24:34 +01001308 CHECK(optimizationViews.GetSubstitutions().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001309
1310 // --------------------------
1311 // Check the failed subgraphs
1312 // --------------------------
1313
Sadik Armagan1625efc2021-06-10 18:24:34 +01001314 CHECK(optimizationViews.GetFailedSubgraphs().empty());
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001315
1316 // -----------------------------
1317 // Check the untouched subgraphs
1318 // -----------------------------
1319
1320 const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
Sadik Armagan1625efc2021-06-10 18:24:34 +01001321 CHECK(untouchedSubgraphs.size() == 1);
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001322
1323 CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
Keith Davis2cddc722022-04-07 11:32:00 +01001324 {subgraphInputSlots.size(),
1325 subgraphOutputSlots.size(), subgraphLayers.size()},
1326 subgraphInputSlots, subgraphOutputSlots,
Matteo Martincighf02e6cd2019-05-17 12:15:30 +01001327 subgraphLayers);
1328}
1329
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
void PartiallyOptimizableSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph: optimizable and unoptimizable
    // convolution groups alternate (conv1/conv3/conv5 optimizable, conv2/conv4 not)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Sanity-check the subgraph built by the helper before optimizing it
    CHECK(subgraphInputSlots.size() == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 15);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv5 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ===============================================================================
    // The expected results are:
    //  - Exactly three substitutions, corresponding to the optimizable layers
    //  - No failed subgraphs
    //  - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
    // ===============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 3);
    // Sort into a consistent order (by first layer name) since the backend does not
    // guarantee the order in which substitutions are reported
    std::sort(substitutions.begin(), substitutions.end(),
              [](auto s1, auto s2)
              { return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
                              s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0; });

    // Each substitutable subgraph is a (weights, bias, conv) triple replaced by a single layer
    std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
                                                                          { 1, 1, 3 },
                                                                          { 1, 1, 3 } };
    std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
                                                                        { 1, 1, 1 },
                                                                        { 1, 1, 1 } };
    std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlot(0))}),
        ConvertSlotsToISlots<InputSlot, IInputSlot>(
            {ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlot(0))})
    };
    std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
    {
        { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
        { layersInGraph.at("Weights Layer 3"), layersInGraph.at("Bias Layer 3"), layersInGraph.at("conv3 layer") },
        { layersInGraph.at("Weights Layer 5"), layersInGraph.at("Bias Layer 5"), layersInGraph.at("conv5 layer") }
    };

    for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
    {
        CheckSubstitution(substitutions.at(substitutionIndex),
                          expectedSubstitutableSubgraphSizes.at(substitutionIndex),
                          expectedReplacementSubgraphSizes.at(substitutionIndex),
                          expectedSubstitutableInputSlots.at(substitutionIndex),
                          expectedSubstitutableOutputSlots.at(substitutionIndex),
                          expectedSubstitutableLayers.at(substitutionIndex));
    }

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 2);
    // Sort into a consistent order (by first layer name), as above
    std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
        return strcmp(s1.GetIConnectableLayers().front()->GetName(),
                      s2.GetIConnectableLayers().front()->GetName()) < 0;
    });

    // The untouched subgraphs are the two unoptimizable (weights, bias, conv) triples
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 },
                                                                      { 1, 1, 3 } };
    std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots{
        ConvertSlotsToISlots<InputSlot,
                             IInputSlot>({ConvertReferenceTypeToPointerType(
                                 layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))}),
        ConvertSlotsToISlots<InputSlot,
                             IInputSlot>({ConvertReferenceTypeToPointerType(
                                 layersInGraph.at("conv4 layer unoptimizable")->GetInputSlot(0))})};

    std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())),
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots()))
    };

    std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
    {
        { layersInGraph.at("Weights Layer 2 unoptimizable"),
          layersInGraph.at("Bias Layer 2 unoptimizable"),
          layersInGraph.at("conv2 layer unoptimizable") },
        { layersInGraph.at("Weights Layer 4 unoptimizable"),
          layersInGraph.at("Bias Layer 4 unoptimizable"),
          layersInGraph.at("conv4 layer unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1480
// The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
// this is meant to test input slots coming from different layers
void PartiallyOptimizableSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create a partially optimizable subgraph with two external inputs
    // (conv1 and conv3 branches joined by an addition layer)
    SubgraphViewSelector::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    // Sanity-check the subgraph built by the helper before optimizing it
    CHECK(subgraphInputSlots.size() == 2);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size() == 10);

    CHECK(Contains(layersInGraph, "conv1 layer"));
    CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
    CHECK(Contains(layersInGraph, "conv3 layer"));
    CHECK(Contains(layersInGraph, "add layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // ==============================================================================
    // The expected results are:
    //  - Exactly one substitution, corresponding to the optimizable layers
    //  - No failed subgraphs
    //  - Exactly one untouched subgraph, corresponding to the non-optimizable layer
    //    (the code below checks for a single untouched subgraph)
    // ==============================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
    CHECK(substitutions.size() == 1);

    // A single substitution covering both optimizable branches and the add layer:
    // two inputs, one output, seven layers collapsed into one replacement layer
    ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 7 };
    ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };

    SubgraphView::IInputSlots expectedSubstitutableInputSlots
    {
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0])})[0],
        ConvertSlotsToISlots<InputSlot, IInputSlot>({
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])})[0]
    };

    SubgraphView::IOutputSlots expectedSubstitutableOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()))
    };

    SubgraphView::IConnectableLayers expectedSubstitutableLayers
    {
        layersInGraph.at("Weights Layer 1"),
        layersInGraph.at("Weights Layer 3"),
        layersInGraph.at("Bias Layer 1"),
        layersInGraph.at("Bias Layer 3"),
        layersInGraph.at("conv1 layer"),
        layersInGraph.at("conv3 layer"),
        layersInGraph.at("add layer")
    };

    CheckSubstitution(substitutions[0],
                      expectedSubstitutableSubgraphSizes,
                      expectedReplacementSubgraphSizes,
                      expectedSubstitutableInputSlots,
                      expectedSubstitutableOutputSlots,
                      expectedSubstitutableLayers);

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    CHECK(optimizationViews.GetFailedSubgraphs().empty());

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
    CHECK(untouchedSubgraphs.size() == 1);

    // The untouched subgraph is the unoptimizable conv2 group (conv + weights + bias)
    std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 } };
    std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
    {
        ConvertSlotsToISlots<InputSlot,
                             IInputSlot>({ConvertReferenceTypeToPointerType(
                                 layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))})};
    std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
    {
        ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
            ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()))
    };
    std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
    {
        { layersInGraph.at("conv2 layer unoptimizable"), layersInGraph.at("Weights Layer 2 unoptimizable"),
          layersInGraph.at("Bias Layer 2 unoptimizable") }
    };

    for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
    {
        CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
                               expectedUntouchedSubgraphSizes.at(untouchedIndex),
                               expectedUntouchedInputSlots.at(untouchedIndex),
                               expectedUntouchedOutputSlots.at(untouchedIndex),
                               expectedUntouchedLayers.at(untouchedIndex));
    }
}
1604
1605} // Anonymous namespace
1606
// Each test case delegates to the corresponding implementation function defined
// in the anonymous namespace above.
// NOTE(review): the "FullyUnsupporte..." spelling matches the impl function
// names as declared earlier in this file — do not "fix" the call sites alone.
TEST_SUITE("OptimizeSubGraph")
{
TEST_CASE("FullyUnsupportedSubgraph1") { FullyUnsupporteSubgraphTestImpl1(); }
TEST_CASE("FullyUnsupportedSubgraph2") { FullyUnsupporteSubgraphTestImpl2(); }
TEST_CASE("FullyOptimizableSubgraph1") { FullyOptimizableSubgraphTestImpl1(); }
TEST_CASE("FullyOptimizableSubgraph2") { FullyOptimizableSubgraphTestImpl2(); }
TEST_CASE("PartiallySupportedSubgraph") { PartiallySupportedSubgraphTestImpl(); }
TEST_CASE("FullyUnoptimizableSubgraph") { FullyUnoptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }

}