//
// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefBackend.hpp"
#include "RefBackendId.hpp"
#include "RefWorkloadFactory.hpp"
#include "RefLayerSupport.hpp"
#include "RefTensorHandleFactory.hpp"

#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/IBackendContext.hpp>
#include <armnn/backends/IMemoryManager.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/SubgraphUtils.hpp>

#include <Optimizer.hpp>

namespace armnn
{

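// Returns the unique identifier of the reference backend, constructed once from RefBackendId().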
const BackendId& RefBackend::GetIdStatic()
{
    static const BackendId s_Id{RefBackendId()};
    return s_Id;
}

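// Creates a workload factory that shares an externally supplied memory manager,
// downcast to the backend-specific RefMemoryManager.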
IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& memoryManager) const
{
    return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager));
}

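// Creates a workload factory backed by a fresh RefMemoryManager, and registers the
// memory manager and a RefTensorHandleFactory with the given registry.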
IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory(
    class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const
{
    auto memoryManager = std::make_shared<RefMemoryManager>();

    tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager);

    std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager);
    // Register copy and import factory pair
    tensorHandleFactoryRegistry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId());
    // Register the factory
    tensorHandleFactoryRegistry.RegisterFactory(std::move(factory));

    return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager));
}

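// The reference backend keeps no backend-wide state, so no context is created.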
IBackendInternal::IBackendContextPtr RefBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}

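// Backend-specific profiling is not supported; an empty profiling context is returned.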
IBackendInternal::IBackendProfilingContextPtr RefBackend::CreateBackendProfilingContext(
    const IRuntime::CreationOptions&, IBackendProfilingPtr&)
{
    return IBackendProfilingContextPtr{};
}

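// Creates this backend's memory manager (RefMemoryManager).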
IBackendInternal::IMemoryManagerUniquePtr RefBackend::CreateMemoryManager() const
{
    return std::make_unique<RefMemoryManager>();
}

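// Returns the shared, lazily constructed object used to query which layers this backend supports.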
IBackendInternal::ILayerSupportSharedPtr RefBackend::GetLayerSupport() const
{
    static ILayerSupportSharedPtr layerSupport{new RefLayerSupport};
    return layerSupport;
}

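// Applies backend-specific optimizations to the subgraph; currently this folds a
// preceding Pad layer into an average Pooling2d layer where possible.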
OptimizationViews RefBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                                                   const ModelOptions& modelOptions) const
{
    OptimizationViews optimizationViews(modelOptions);

    auto it = subgraph.endIConnectable();
    std::map<LayerGuid, Layer*> untouched;

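    // First pass: record every layer so those left unmodified can be reported later.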
    while (it != subgraph.beginIConnectable())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));
        untouched.insert({base.GetGuid(), &base});
    }

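    // Second pass: walk the layers again looking for fusion opportunities.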
    it = subgraph.endIConnectable();
    while (it != subgraph.beginIConnectable())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));

        // Special case to fuse padding into average pooling 2d for quantized datatype.
        // Required to be done as a backend specific optimization as Neon does not support this special case.
        if (base.GetType() == LayerType::Pooling2d)
        {
            Pooling2dLayer* baseLayer = PolymorphicDowncast<Pooling2dLayer*>(&base);
            Pooling2dDescriptor poolingDescriptor = baseLayer->GetParameters();

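            // Fold only when the input comes from a Pad layer whose output has no other consumers.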
            if (baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer().GetType() == LayerType::Pad)
            {
                PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(
                    &baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer());
                if (padLayer->GetOutputSlot(0).GetNumConnections() == 1 &&
                    optimizations::pad_fold::TryFoldPadIntoLayer2d(padLayer->GetParameters(),
                                                                   poolingDescriptor,
                                                                   padLayer->GetOutputSlot().GetTensorInfo(),
                                                                   true))
                {
                    FoldPadIntoAveragePool2d<Pooling2dLayer>(optimizationViews, baseLayer,
                                                             poolingDescriptor, padLayer);
                    untouched.erase(baseLayer->GetGuid());
                    untouched.erase(padLayer->GetGuid());
                }
            }
        }
    }

    if (optimizationViews.GetSubstitutions().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    else
    {
        ReportUntouchedLayers(optimizationViews, untouched);
    }

    return optimizationViews;
}

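// The reference backend exposes a single tensor handle factory.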
std::vector<ITensorHandleFactory::FactoryId> RefBackend::GetHandleFactoryPreferences() const
{
    return std::vector<ITensorHandleFactory::FactoryId> { RefTensorHandleFactory::GetIdStatic() };
}

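// Registers this backend's memory manager and tensor handle factory with the registry,
// mirroring the setup performed in CreateWorkloadFactory above.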
void RefBackend::RegisterTensorHandleFactories(class TensorHandleFactoryRegistry& registry)
{
    auto memoryManager = std::make_shared<RefMemoryManager>();

    registry.RegisterMemoryManager(memoryManager);

    std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager);

    // Register copy and import factory pair
    registry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId());
    // Register the factory
    registry.RegisterFactory(std::move(factory));
}

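// Returns the allocator used when no custom allocator is supplied.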
std::unique_ptr<ICustomAllocator> RefBackend::GetDefaultAllocator() const
{
    return std::make_unique<DefaultAllocator>();
}

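// Wraps a per-inference WorkingMemDescriptor in the type-erased ExecutionData handed to workloads.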
ExecutionData RefBackend::CreateExecutionData(WorkingMemDescriptor& workingMemDescriptor) const
{
    ExecutionData executionData;
    executionData.m_Data = &workingMemDescriptor;
    return executionData;
}

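// Repoints existing ExecutionData at a new WorkingMemDescriptor without reallocating it.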
void RefBackend::UpdateExecutionData(ExecutionData& executionData, WorkingMemDescriptor& workingMemDescriptor) const
{
    executionData.m_Data = &workingMemDescriptor;
}

} // namespace armnn