blob: 5dfe9a3b8b35286e25b1d80d3ee0977660d6b028 [file] [log] [blame]
//
// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <armnnTestUtils/MockBackend.hpp>
#include <armnnTestUtils/MockTensorHandle.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/test/MockBackendId.hpp>
#include <SubgraphViewSelector.hpp>

#include <memory>
Colm Donelan17948b52022-02-01 23:37:04 +000013
14namespace armnn
15{
16
Colm Donelanc42a9872022-02-02 16:35:09 +000017const BackendId& MockBackend::GetIdStatic()
18{
19 static const BackendId s_Id{MockBackendId()};
20 return s_Id;
21}
22
namespace
{
// File-local backend id; returned by MockWorkloadFactory::GetBackendId() below.
// Holds the same value as MockBackend::GetIdStatic().
static const BackendId s_Id{ MockBackendId() };
}
27
28MockWorkloadFactory::MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager)
29 : m_MemoryManager(memoryManager)
30{}
31
32MockWorkloadFactory::MockWorkloadFactory()
33 : m_MemoryManager(new MockMemoryManager())
34{}
35
// Identifies this factory as belonging to the mock backend.
// Returns the file-local s_Id (same value as MockBackend::GetIdStatic()).
const BackendId& MockWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
40
41std::unique_ptr<IWorkload> MockWorkloadFactory::CreateWorkload(LayerType type,
42 const QueueDescriptor& descriptor,
43 const WorkloadInfo& info) const
44{
45 switch (type)
46 {
47 case LayerType::MemCopy: {
48 auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
49 if (descriptor.m_Inputs.empty())
50 {
51 throw InvalidArgumentException("MockWorkloadFactory: CreateMemCopy() expected an input tensor.");
52 }
53 return std::make_unique<CopyMemGenericWorkload>(*memCopyQueueDescriptor, info);
54 }
55 default:
56 return nullptr;
57 }
58}
59
Cathal Corbett3464ba12022-03-04 11:36:39 +000060bool IsLayerSupported(const armnn::Layer* layer)
61{
62 ARMNN_ASSERT(layer != nullptr);
63
64 armnn::LayerType layerType = layer->GetType();
65 switch (layerType)
66 {
67 case armnn::LayerType::Input:
68 case armnn::LayerType::Output:
Keith Davis2cddc722022-04-07 11:32:00 +010069 case armnn::LayerType::Constant:
Cathal Corbett3464ba12022-03-04 11:36:39 +000070 case armnn::LayerType::Addition:
71 case armnn::LayerType::Convolution2d:
72 // Layer supported
73 return true;
74 default:
75 // Layer unsupported
76 return false;
77 }
78}
79
80bool IsLayerSupported(const armnn::Layer& layer)
81{
82 return IsLayerSupported(&layer);
83}
84
85bool IsLayerOptimizable(const armnn::Layer* layer)
86{
87 ARMNN_ASSERT(layer != nullptr);
88
89 // A Layer is not optimizable if its name contains "unoptimizable"
90 const std::string layerName(layer->GetName());
91 bool optimizable = layerName.find("unoptimizable") == std::string::npos;
92
93 return optimizable;
94}
95
96bool IsLayerOptimizable(const armnn::Layer& layer)
97{
98 return IsLayerOptimizable(&layer);
99}
100
} // namespace armnn
102
103namespace armnn
104{
105
106MockBackendInitialiser::MockBackendInitialiser()
107{
108 BackendRegistryInstance().Register(MockBackend::GetIdStatic(),
109 []()
110 {
111 return IBackendInternalUniquePtr(new MockBackend);
112 });
113}
114
115MockBackendInitialiser::~MockBackendInitialiser()
116{
117 try
118 {
119 BackendRegistryInstance().Deregister(MockBackend::GetIdStatic());
120 }
121 catch (...)
122 {
123 std::cerr << "could not deregister mock backend" << std::endl;
124 }
125}
126
127IBackendInternal::IWorkloadFactoryPtr MockBackend::CreateWorkloadFactory(
128 const IBackendInternal::IMemoryManagerSharedPtr& /*memoryManager*/) const
129{
130 return IWorkloadFactoryPtr{};
131}
132
133IBackendInternal::IBackendContextPtr MockBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
134{
135 return IBackendContextPtr{};
136}
137
138IBackendInternal::IBackendProfilingContextPtr MockBackend::CreateBackendProfilingContext(
139 const IRuntime::CreationOptions& options, IBackendProfilingPtr& backendProfiling)
140{
141 IgnoreUnused(options);
142 std::shared_ptr<armnn::MockBackendProfilingContext> context =
143 std::make_shared<MockBackendProfilingContext>(backendProfiling);
144 MockBackendProfilingService::Instance().SetProfilingContextPtr(context);
145 return context;
146}
147
148IBackendInternal::IMemoryManagerUniquePtr MockBackend::CreateMemoryManager() const
149{
150 return IMemoryManagerUniquePtr{};
151}
152
153IBackendInternal::ILayerSupportSharedPtr MockBackend::GetLayerSupport() const
154{
155 static ILayerSupportSharedPtr layerSupport{new MockLayerSupport};
156 return layerSupport;
157}
158
// Partitions the given sub-graph for the mock backend and records the outcome
// in an OptimizationViews object:
//  - supported AND optimizable layers are grouped and substituted by a single
//    PreCompiledLayer (with a null compiled blob, which suffices for testing),
//  - unsupported layers are grouped and reported as failed sub-graphs,
//  - supported-but-not-optimizable layers (see IsLayerOptimizable) are grouped
//    and reported as untouched sub-graphs.
OptimizationViews MockBackend::OptimizeSubgraphView(const SubgraphView& subgraph) const
{
    // Prepare the optimization views
    OptimizationViews optimizationViews;

    // Get the layers of the input sub-graph
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();

    // Classify every layer into exactly one of the three buckets below
    SubgraphView::IConnectableLayers supportedLayers;
    SubgraphView::IConnectableLayers unsupportedLayers;
    SubgraphView::IConnectableLayers untouchedLayers;
    std::for_each(subgraphLayers.begin(),
                  subgraphLayers.end(),
                  [&](IConnectableLayer* layer)
                  {
                      bool supported = IsLayerSupported(PolymorphicDowncast<Layer*>(layer));
                      if (supported)
                      {
                          // Layer supported, check if it's optimizable
                          bool optimizable = IsLayerOptimizable(PolymorphicDowncast<Layer*>(layer));
                          if (optimizable)
                          {
                              // Layer fully supported
                              supportedLayers.push_back(layer);
                          }
                          else
                          {
                              // Layer supported but not optimizable
                              untouchedLayers.push_back(layer);
                          }
                      }
                      else
                      {
                          // Layer unsupported
                          unsupportedLayers.push_back(layer);
                      }
                  });

    // Check if there are supported layers
    if (!supportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, but that are optimizable
        auto supportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the supported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs supportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, supportedSubgraphSelector);

        // Create a substitution pair for each supported sub-graph
        std::for_each(supportedSubgraphs.begin(),
                      supportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& supportedSubgraph)
                      {
                          ARMNN_ASSERT(supportedSubgraph != nullptr);

                          // A null blob is sufficient here: the mock backend never executes the
                          // pre-compiled layer, it only exercises the substitution machinery.
                          CompiledBlobPtr blobPtr;
                          BackendId backend = MockBackendId();

                          IConnectableLayer* preCompiledLayer =
                              optimizationViews.GetINetwork()->AddPrecompiledLayer(
                                  PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
                                                        supportedSubgraph->GetNumOutputSlots()),
                                  std::move(blobPtr),
                                  backend,
                                  nullptr);

                          SubgraphView substitutionSubgraph(*supportedSubgraph);
                          SubgraphView replacementSubgraph(preCompiledLayer);

                          optimizationViews.AddSubstitution({ substitutionSubgraph, replacementSubgraph });
                      });
    }

    // Check if there are unsupported layers
    if (!unsupportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, and are not supported
        auto unsupportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   !IsLayerSupported(layer);
        };

        // Apply the subgraph selector to the unsupported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs unsupportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, unsupportedSubgraphSelector);

        // Add each unsupported sub-graph to the list of failed sub-graphs in the optimization views
        std::for_each(unsupportedSubgraphs.begin(),
                      unsupportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& unsupportedSubgraph)
                      {
                          ARMNN_ASSERT(unsupportedSubgraph != nullptr);

                          optimizationViews.AddFailedSubgraph(SubgraphView(*unsupportedSubgraph));
                      });
    }

    // Check if there are untouched layers
    if (!untouchedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, that are supported but are not optimizable
        auto untouchedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   !IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the untouched layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs untouchedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, untouchedSubgraphSelector);

        // Add each untouched sub-graph to the list of untouched sub-graphs in the optimization views
        std::for_each(untouchedSubgraphs.begin(),
                      untouchedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& untouchedSubgraph)
                      {
                          ARMNN_ASSERT(untouchedSubgraph != nullptr);

                          optimizationViews.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
                      });
    }

    return optimizationViews;
}
297
298std::unique_ptr<ICustomAllocator> MockBackend::GetDefaultAllocator() const
299{
300 return std::make_unique<DefaultAllocator>();
301}
302
Colm Donelan17948b52022-02-01 23:37:04 +0000303} // namespace armnn