//
// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefBackend.hpp"
#include "RefBackendId.hpp"
#include "RefWorkloadFactory.hpp"
#include "RefLayerSupport.hpp"
#include "RefTensorHandleFactory.hpp"

#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/IBackendContext.hpp>
#include <armnn/backends/IMemoryManager.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/SubgraphUtils.hpp>

namespace armnn
{

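// Returns the static BackendId under which the reference backend is registered.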
const BackendId& RefBackend::GetIdStatic()
{
    static const BackendId s_Id{RefBackendId()};
    return s_Id;
}

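// Creates a workload factory that draws its tensor memory from the supplied
// shared memory manager.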
IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& memoryManager) const
{
    return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager));
}

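// Overload used when a TensorHandleFactoryRegistry is available: the backend's
// memory manager and tensor handle factory are registered with it before the
// workload factory is returned.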
IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory(
    class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const
{
    auto memoryManager = std::make_shared<RefMemoryManager>();

    tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager);

    std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager);
    // Register copy and import factory pair
    tensorHandleFactoryRegistry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId());
    // Register the factory
    tensorHandleFactoryRegistry.RegisterFactory(std::move(factory));

    return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager));
}

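// The reference backend needs no per-runtime context, so an empty pointer is returned.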
IBackendInternal::IBackendContextPtr RefBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}

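// Profiling is likewise not backed by any context on this backend.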
IBackendInternal::IBackendProfilingContextPtr RefBackend::CreateBackendProfilingContext(
    const IRuntime::CreationOptions&, IBackendProfilingPtr&)
{
    return IBackendProfilingContextPtr{};
}

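// Creates the backend's internal memory manager.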
IBackendInternal::IMemoryManagerUniquePtr RefBackend::CreateMemoryManager() const
{
    return std::make_unique<RefMemoryManager>();
}

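// Layer support queries are answered by a single shared RefLayerSupport instance.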
IBackendInternal::ILayerSupportSharedPtr RefBackend::GetLayerSupport() const
{
    static ILayerSupportSharedPtr layerSupport{new RefLayerSupport};
    return layerSupport;
}

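// Applies backend-specific graph optimizations to the given subgraph and
// records the substitutions (and any untouched layers) in the returned
// OptimizationViews.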
OptimizationViews RefBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                                                   const ModelOptions& modelOptions) const
{
    OptimizationViews optimizationViews(modelOptions);

    auto it = subgraph.end();
    std::map<LayerGuid, Layer*> untouched;

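    // First pass: record every layer in the subgraph, so that anything not
    // fused or removed below can later be reported as untouched.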
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));
        untouched.insert({base.GetGuid(), &base});
    }

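    // Second pass: walk the subgraph in reverse and apply the backend-specific
    // substitutions.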
    it = subgraph.end();
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));

        // Special case to fuse padding into average pooling 2d for quantized datatype.
        // Required to be done as a backend-specific optimization, as Neon does not support this special case.
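        // Sketch of the substitution (descriptor details are an assumption, not
        // spelled out in this file): Pad -> Pooling2d(Average) is replaced by a
        // single Pooling2d whose descriptor absorbs the Pad layer's padding.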
        if (base.GetType() == LayerType::Pooling2d)
        {
            Pooling2dLayer* baseLayer = PolymorphicDowncast<Pooling2dLayer*>(&base);
            Pooling2dDescriptor poolingDescriptor = baseLayer->GetParameters();

            if (baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer().GetType() == LayerType::Pad)
            {
                PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(
                    &baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer());
                if (padLayer->GetOutputSlot(0).GetNumConnections() == 1 &&
                    optimizations::pad_fold::TryFoldPadIntoLayer2d(padLayer->GetParameters(),
                                                                   poolingDescriptor,
                                                                   padLayer->GetOutputSlot().GetTensorInfo(),
                                                                   true))
                {
                    FoldPadIntoAveragePool2d<Pooling2dLayer>(optimizationViews, baseLayer,
                                                             poolingDescriptor, padLayer);
                    untouched.erase(baseLayer->GetGuid());
                    untouched.erase(padLayer->GetGuid());
                }
            }
        }

        // Remove Reshape where possible
        if (base.GetType() == LayerType::Reshape)
        {
            ReshapeLayer* baseLayer = PolymorphicDowncast<ReshapeLayer*>(&base);
            RemoveReshapeLayer(baseLayer, untouched, optimizationViews);
        }
    }

    if (optimizationViews.GetSubstitutions().empty() && optimizationViews.GetDeletedSubgraphs().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    else
    {
        ReportUntouchedLayers(optimizationViews, untouched);
    }

    return optimizationViews;
}

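// The reference backend exposes a single tensor handle factory.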
std::vector<ITensorHandleFactory::FactoryId> RefBackend::GetHandleFactoryPreferences() const
{
    return std::vector<ITensorHandleFactory::FactoryId> { RefTensorHandleFactory::GetIdStatic() };
}

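// Registers the reference memory manager and tensor handle factory with the
// given registry, mirroring the registration done in CreateWorkloadFactory.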
void RefBackend::RegisterTensorHandleFactories(class TensorHandleFactoryRegistry& registry)
{
    auto memoryManager = std::make_shared<RefMemoryManager>();

    registry.RegisterMemoryManager(memoryManager);

    std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager);

    // Register copy and import factory pair
    registry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId());
    // Register the factory
    registry.RegisterFactory(std::move(factory));
}

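// Returns the allocator used when the application does not supply a custom one.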
std::unique_ptr<ICustomAllocator> RefBackend::GetDefaultAllocator() const
{
    return std::make_unique<DefaultAllocator>();
}

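// ExecutionData for the reference backend simply wraps a pointer to the
// WorkingMemDescriptor; the two helpers below create and refresh that pointer.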
ExecutionData RefBackend::CreateExecutionData(WorkingMemDescriptor& workingMemDescriptor) const
{
    ExecutionData executionData;
    executionData.m_Data = &workingMemDescriptor;
    return executionData;
}

void RefBackend::UpdateExecutionData(ExecutionData& executionData, WorkingMemDescriptor& workingMemDescriptor) const
{
    executionData.m_Data = &workingMemDescriptor;
}

} // namespace armnn