blob: ac7f7c7fef9816f42ec2b3e67209f9dc6cfee44a [file] [log] [blame]
Colm Donelan17948b52022-02-01 23:37:04 +00001//
2// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <armnnTestUtils/MockBackend.hpp>
#include <armnnTestUtils/MockTensorHandle.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/test/MockBackendId.hpp>
#include <SubgraphViewSelector.hpp>

#include <algorithm>
#include <iostream>
#include <memory>
#include <string>
Colm Donelan17948b52022-02-01 23:37:04 +000013
14namespace armnn
15{
16
Colm Donelanc42a9872022-02-02 16:35:09 +000017const BackendId& MockBackend::GetIdStatic()
18{
19 static const BackendId s_Id{MockBackendId()};
20 return s_Id;
21}
22
Colm Donelan17948b52022-02-01 23:37:04 +000023namespace
24{
25static const BackendId s_Id{ MockBackendId() };
26}
27
// Constructs a factory that shares ownership of the supplied memory manager.
MockWorkloadFactory::MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{}
31
32MockWorkloadFactory::MockWorkloadFactory()
33 : m_MemoryManager(new MockMemoryManager())
34{}
35
// Returns the file-local mock backend id (s_Id from the anonymous namespace).
const BackendId& MockWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
40
41std::unique_ptr<IWorkload> MockWorkloadFactory::CreateWorkload(LayerType type,
42 const QueueDescriptor& descriptor,
43 const WorkloadInfo& info) const
44{
45 switch (type)
46 {
47 case LayerType::MemCopy: {
48 auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
49 if (descriptor.m_Inputs.empty())
50 {
51 throw InvalidArgumentException("MockWorkloadFactory: CreateMemCopy() expected an input tensor.");
52 }
53 return std::make_unique<CopyMemGenericWorkload>(*memCopyQueueDescriptor, info);
54 }
55 default:
56 return nullptr;
57 }
58}
59
Cathal Corbett3464ba12022-03-04 11:36:39 +000060bool IsLayerSupported(const armnn::Layer* layer)
61{
62 ARMNN_ASSERT(layer != nullptr);
63
64 armnn::LayerType layerType = layer->GetType();
65 switch (layerType)
66 {
67 case armnn::LayerType::Input:
68 case armnn::LayerType::Output:
69 case armnn::LayerType::Addition:
70 case armnn::LayerType::Convolution2d:
71 // Layer supported
72 return true;
73 default:
74 // Layer unsupported
75 return false;
76 }
77}
78
79bool IsLayerSupported(const armnn::Layer& layer)
80{
81 return IsLayerSupported(&layer);
82}
83
84bool IsLayerOptimizable(const armnn::Layer* layer)
85{
86 ARMNN_ASSERT(layer != nullptr);
87
88 // A Layer is not optimizable if its name contains "unoptimizable"
89 const std::string layerName(layer->GetName());
90 bool optimizable = layerName.find("unoptimizable") == std::string::npos;
91
92 return optimizable;
93}
94
95bool IsLayerOptimizable(const armnn::Layer& layer)
96{
97 return IsLayerOptimizable(&layer);
98}
99
} // namespace armnn
101
102namespace armnn
103{
104
105MockBackendInitialiser::MockBackendInitialiser()
106{
107 BackendRegistryInstance().Register(MockBackend::GetIdStatic(),
108 []()
109 {
110 return IBackendInternalUniquePtr(new MockBackend);
111 });
112}
113
// Deregisters the mock backend. Destructors must not throw, so any failure
// to deregister (e.g. the id was never registered) is deliberately swallowed
// and reported on stderr instead of propagating.
MockBackendInitialiser::~MockBackendInitialiser()
{
    try
    {
        BackendRegistryInstance().Deregister(MockBackend::GetIdStatic());
    }
    catch (...)
    {
        std::cerr << "could not deregister mock backend" << std::endl;
    }
}
125
// Returns an empty factory pointer: this backend does not hand out a workload
// factory through the IBackendInternal interface.
// NOTE(review): presumably intentional for a mock (tests construct
// MockWorkloadFactory directly) — confirm callers tolerate a null factory.
IBackendInternal::IWorkloadFactoryPtr MockBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& /*memoryManager*/) const
{
    return IWorkloadFactoryPtr{};
}
131
// Returns an empty context: the mock backend provides no backend context.
// NOTE(review): confirm callers handle a null context pointer.
IBackendInternal::IBackendContextPtr MockBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}
136
137IBackendInternal::IBackendProfilingContextPtr MockBackend::CreateBackendProfilingContext(
138 const IRuntime::CreationOptions& options, IBackendProfilingPtr& backendProfiling)
139{
140 IgnoreUnused(options);
141 std::shared_ptr<armnn::MockBackendProfilingContext> context =
142 std::make_shared<MockBackendProfilingContext>(backendProfiling);
143 MockBackendProfilingService::Instance().SetProfilingContextPtr(context);
144 return context;
145}
146
// Returns an empty pointer: the mock backend exposes no backend-level memory
// manager (MockWorkloadFactory constructs its own MockMemoryManager instead).
// NOTE(review): confirm callers handle a null memory manager.
IBackendInternal::IMemoryManagerUniquePtr MockBackend::CreateMemoryManager() const
{
    return IMemoryManagerUniquePtr{};
}
151
152IBackendInternal::ILayerSupportSharedPtr MockBackend::GetLayerSupport() const
153{
154 static ILayerSupportSharedPtr layerSupport{new MockLayerSupport};
155 return layerSupport;
156}
157
// Partitions the given sub-graph's layers into three buckets and records the
// outcome in the returned OptimizationViews:
//   - supported & optimizable layers  -> substituted by a pre-compiled layer,
//   - unsupported layers              -> recorded as failed sub-graphs,
//   - supported but not optimizable   -> recorded as untouched sub-graphs.
// Support/optimizability are decided by the file-local IsLayerSupported /
// IsLayerOptimizable helpers above.
OptimizationViews MockBackend::OptimizeSubgraphView(const SubgraphView& subgraph) const
{
    // Prepare the optimization views
    OptimizationViews optimizationViews;

    // Get the layers of the input sub-graph
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();

    // Parse the layers into the three buckets
    SubgraphView::IConnectableLayers supportedLayers;
    SubgraphView::IConnectableLayers unsupportedLayers;
    SubgraphView::IConnectableLayers untouchedLayers;
    std::for_each(subgraphLayers.begin(),
                  subgraphLayers.end(),
                  [&](IConnectableLayer* layer)
                  {
                      bool supported = IsLayerSupported(PolymorphicDowncast<Layer*>(layer));
                      if (supported)
                      {
                          // Layer supported, check if it's optimizable
                          bool optimizable = IsLayerOptimizable(PolymorphicDowncast<Layer*>(layer));
                          if (optimizable)
                          {
                              // Layer fully supported
                              supportedLayers.push_back(layer);
                          }
                          else
                          {
                              // Layer supported but not optimizable
                              untouchedLayers.push_back(layer);
                          }
                      }
                      else
                      {
                          // Layer unsupported
                          unsupportedLayers.push_back(layer);
                      }
                  });

    // Check if there are supported layers
    if (!supportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, and that are
        // both supported and optimizable
        auto supportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the supported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs supportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, supportedSubgraphSelector);

        // Create a substitution pair for each supported sub-graph
        std::for_each(supportedSubgraphs.begin(),
                      supportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& supportedSubgraph)
                      {
                          ARMNN_ASSERT(supportedSubgraph != nullptr);

                          // NOTE(review): blobPtr stays null — the pre-compiled layer carries
                          // no compiled payload in this mock; confirm consumers tolerate that.
                          CompiledBlobPtr blobPtr;
                          BackendId backend = MockBackendId();

                          // Replacement layer with matching slot counts so it can be spliced in
                          IConnectableLayer* preCompiledLayer =
                              optimizationViews.GetINetwork()->AddPrecompiledLayer(
                                  PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
                                                        supportedSubgraph->GetNumOutputSlots()),
                                  std::move(blobPtr),
                                  backend,
                                  nullptr);

                          SubgraphView substitutionSubgraph(*supportedSubgraph);
                          SubgraphView replacementSubgraph(preCompiledLayer);

                          optimizationViews.AddSubstitution({ substitutionSubgraph, replacementSubgraph });
                      });
    }

    // Check if there are unsupported layers
    if (!unsupportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, and are not supported
        auto unsupportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   !IsLayerSupported(layer);
        };

        // Apply the subgraph selector to the unsupported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs unsupportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, unsupportedSubgraphSelector);

        // Add each unsupported sub-graph to the list of failed sub-graphs in the optimization views
        std::for_each(unsupportedSubgraphs.begin(),
                      unsupportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& unsupportedSubgraph)
                      {
                          ARMNN_ASSERT(unsupportedSubgraph != nullptr);

                          optimizationViews.AddFailedSubgraph(SubgraphView(*unsupportedSubgraph));
                      });
    }

    // Check if there are untouched layers
    if (!untouchedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, that are supported but not optimizable
        auto untouchedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   !IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the untouched layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs untouchedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, untouchedSubgraphSelector);

        // Add each untouched sub-graph to the list of untouched sub-graphs in the optimization views
        std::for_each(untouchedSubgraphs.begin(),
                      untouchedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& untouchedSubgraph)
                      {
                          ARMNN_ASSERT(untouchedSubgraph != nullptr);

                          optimizationViews.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
                      });
    }

    return optimizationViews;
}
296
// Returns a freshly constructed DefaultAllocator; ownership passes to the caller.
std::unique_ptr<ICustomAllocator> MockBackend::GetDefaultAllocator() const
{
    return std::make_unique<DefaultAllocator>();
}
301
Colm Donelan17948b52022-02-01 23:37:04 +0000302} // namespace armnn