//
// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <armnnTestUtils/MockBackend.hpp>
#include <armnnTestUtils/MockTensorHandle.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/test/MockBackendId.hpp>
#include <SubgraphViewSelector.hpp>

#include "Layer.hpp"

namespace armnn
{

const BackendId& MockBackend::GetIdStatic()
{
    static const BackendId s_Id{MockBackendId()};
    return s_Id;
}

namespace
{
static const BackendId s_Id{ MockBackendId() };
}

MockWorkloadFactory::MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{}

MockWorkloadFactory::MockWorkloadFactory()
    : m_MemoryManager(new MockMemoryManager())
{}

const BackendId& MockWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

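// The mock factory can only create MemCopy workloads; every other layer type
// yields a nullptr. A minimal usage sketch (illustrative only, assuming the
// descriptor and info have been populated elsewhere):
//
//     MockWorkloadFactory factory;
//     auto workload = factory.CreateWorkload(LayerType::MemCopy, descriptor, info);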
std::unique_ptr<IWorkload> MockWorkloadFactory::CreateWorkload(LayerType type,
                                                               const QueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    switch (type)
    {
        case LayerType::MemCopy:
        {
            auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
            if (descriptor.m_Inputs.empty())
            {
                throw InvalidArgumentException("MockWorkloadFactory: CreateMemCopy() expected an input tensor.");
            }
            return std::make_unique<CopyMemGenericWorkload>(*memCopyQueueDescriptor, info);
        }
        default:
            return nullptr;
    }
}

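// Returns true only for the layer types listed below; the mock backend
// reports every other layer type as unsupported.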
bool IsLayerSupported(const armnn::Layer* layer)
{
    ARMNN_ASSERT(layer != nullptr);

    armnn::LayerType layerType = layer->GetType();
    switch (layerType)
    {
        case armnn::LayerType::Input:
        case armnn::LayerType::Output:
        case armnn::LayerType::Constant:
        case armnn::LayerType::Addition:
        case armnn::LayerType::Convolution2d:
        case armnn::LayerType::ElementwiseBinary:
            // Layer supported
            return true;
        default:
            // Layer unsupported
            return false;
    }
}

bool IsLayerSupported(const armnn::Layer& layer)
{
    return IsLayerSupported(&layer);
}

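// Tests can steer a layer down the "supported but not optimizable" path by
// including the substring "unoptimizable" in the layer's name; such layers
// end up in an untouched sub-graph in OptimizeSubgraphView below.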
bool IsLayerOptimizable(const armnn::Layer* layer)
{
    ARMNN_ASSERT(layer != nullptr);

    // A layer is not optimizable if its name contains "unoptimizable"
    const std::string layerName(layer->GetName());
    bool optimizable = layerName.find("unoptimizable") == std::string::npos;

    return optimizable;
}

bool IsLayerOptimizable(const armnn::Layer& layer)
{
    return IsLayerOptimizable(&layer);
}

} // namespace armnn

namespace armnn
{

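// RAII helper: construction registers the mock backend with the global
// BackendRegistry and destruction deregisters it again, so a test can scope
// the registration to a block. A sketch of the intended use (illustrative):
//
//     {
//         MockBackendInitialiser initialiser; // MockBackend is now registered
//         // ... exercise code that looks the backend up by id ...
//     } // MockBackend is deregistered here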
MockBackendInitialiser::MockBackendInitialiser()
{
    BackendRegistryInstance().Register(MockBackend::GetIdStatic(),
                                       []()
                                       {
                                           return IBackendInternalUniquePtr(new MockBackend);
                                       });
}

MockBackendInitialiser::~MockBackendInitialiser()
{
    try
    {
        BackendRegistryInstance().Deregister(MockBackend::GetIdStatic());
    }
    catch (...)
    {
        std::cerr << "could not deregister mock backend" << std::endl;
    }
}

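// The mock backend deliberately provides no real workload factory, backend
// context or memory manager; the following hooks return empty pointers.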
IBackendInternal::IWorkloadFactoryPtr MockBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& /*memoryManager*/) const
{
    return IWorkloadFactoryPtr{};
}

IBackendInternal::IBackendContextPtr MockBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}

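// Profiling is the one area where the mock backend returns a live object: the
// created context is also cached in the MockBackendProfilingService singleton
// so tests can retrieve it later and inspect profiling interactions.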
IBackendInternal::IBackendProfilingContextPtr MockBackend::CreateBackendProfilingContext(
    const IRuntime::CreationOptions& options, IBackendProfilingPtr& backendProfiling)
{
    IgnoreUnused(options);
    std::shared_ptr<armnn::MockBackendProfilingContext> context =
        std::make_shared<MockBackendProfilingContext>(backendProfiling);
    MockBackendProfilingService::Instance().SetProfilingContextPtr(context);
    return context;
}

IBackendInternal::IMemoryManagerUniquePtr MockBackend::CreateMemoryManager() const
{
    return IMemoryManagerUniquePtr{};
}

IBackendInternal::ILayerSupportSharedPtr MockBackend::GetLayerSupport() const
{
    static ILayerSupportSharedPtr layerSupport{new MockLayerSupport};
    return layerSupport;
}

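// Partitions the input sub-graph three ways: supported-and-optimizable layers
// are substituted with a pre-compiled layer, unsupported layers are reported
// as failed, and supported-but-unoptimizable layers are left untouched.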
OptimizationViews MockBackend::OptimizeSubgraphView(const SubgraphView& subgraph) const
{
    // Prepare the optimization views
    OptimizationViews optimizationViews;

    // Get the layers of the input sub-graph
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();

    // Classify the layers
    SubgraphView::IConnectableLayers supportedLayers;
    SubgraphView::IConnectableLayers unsupportedLayers;
    SubgraphView::IConnectableLayers untouchedLayers;
    std::for_each(subgraphLayers.begin(),
                  subgraphLayers.end(),
                  [&](IConnectableLayer* layer)
                  {
                      bool supported = IsLayerSupported(PolymorphicDowncast<Layer*>(layer));
                      if (supported)
                      {
                          // Layer supported, check if it's optimizable
                          bool optimizable = IsLayerOptimizable(PolymorphicDowncast<Layer*>(layer));
                          if (optimizable)
                          {
                              // Layer fully supported
                              supportedLayers.push_back(layer);
                          }
                          else
                          {
                              // Layer supported but not optimizable
                              untouchedLayers.push_back(layer);
                          }
                      }
                      else
                      {
                          // Layer unsupported
                          unsupportedLayers.push_back(layer);
                      }
                  });

    // Check if there are supported layers
    if (!supportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, are supported and are optimizable
        auto supportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the supported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs supportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, supportedSubgraphSelector);

        // Create a substitution pair for each supported sub-graph
        std::for_each(supportedSubgraphs.begin(),
                      supportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& supportedSubgraph)
                      {
                          ARMNN_ASSERT(supportedSubgraph != nullptr);

                          // The mock backend has nothing to compile, so the blob stays empty
                          CompiledBlobPtr blobPtr;
                          BackendId backend = MockBackendId();

                          IConnectableLayer* preCompiledLayer =
                              optimizationViews.GetINetwork()->AddPrecompiledLayer(
                                  PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
                                                        supportedSubgraph->GetNumOutputSlots()),
                                  std::move(blobPtr),
                                  backend,
                                  nullptr);

                          SubgraphView substitutionSubgraph(*supportedSubgraph);
                          SubgraphView replacementSubgraph(preCompiledLayer);

                          optimizationViews.AddSubstitution({ substitutionSubgraph, replacementSubgraph });
                      });
    }

    // Check if there are unsupported layers
    if (!unsupportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs and are not supported
        auto unsupportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   !IsLayerSupported(layer);
        };

        // Apply the subgraph selector to the unsupported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs unsupportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, unsupportedSubgraphSelector);

        // Add each unsupported sub-graph to the list of failed sub-graphs in the optimization views
        std::for_each(unsupportedSubgraphs.begin(),
                      unsupportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& unsupportedSubgraph)
                      {
                          ARMNN_ASSERT(unsupportedSubgraph != nullptr);

                          optimizationViews.AddFailedSubgraph(SubgraphView(*unsupportedSubgraph));
                      });
    }

    // Check if there are untouched layers
    if (!untouchedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, are supported but are not optimizable
        auto untouchedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   !IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the untouched layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs untouchedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, untouchedSubgraphSelector);

        // Add each untouched sub-graph to the list of untouched sub-graphs in the optimization views
        std::for_each(untouchedSubgraphs.begin(),
                      untouchedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& untouchedSubgraph)
                      {
                          ARMNN_ASSERT(untouchedSubgraph != nullptr);

                          optimizationViews.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
                      });
    }

    return optimizationViews;
}

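// The mock backend hands out the stock DefaultAllocator rather than a
// backend-specific allocator.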
std::unique_ptr<ICustomAllocator> MockBackend::GetDefaultAllocator() const
{
    return std::make_unique<DefaultAllocator>();
}

} // namespace armnn