arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 1 | // |
Matthew Sloyan | 2d213a7 | 2022-06-30 17:13:04 +0100 | [diff] [blame] | 2 | // Copyright © 2022 Arm Ltd and Contributors. All rights reserved. |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
| 6 | #include "RefBackend.hpp" |
David Beck | 3e9e115 | 2018-10-17 14:17:50 +0100 | [diff] [blame] | 7 | #include "RefBackendId.hpp" |
arovir01 | a094479 | 2018-10-11 15:00:58 +0100 | [diff] [blame] | 8 | #include "RefWorkloadFactory.hpp" |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 9 | #include "RefLayerSupport.hpp" |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 10 | #include "RefTensorHandleFactory.hpp" |
arovir01 | a094479 | 2018-10-11 15:00:58 +0100 | [diff] [blame] | 11 | |
Matteo Martincigh | c601aa6 | 2019-10-29 15:03:22 +0000 | [diff] [blame] | 12 | #include <armnn/BackendRegistry.hpp> |
Matteo Martincigh | e5b8eb9 | 2019-11-28 15:45:42 +0000 | [diff] [blame] | 13 | #include <armnn/backends/IBackendContext.hpp> |
| 14 | #include <armnn/backends/IMemoryManager.hpp> |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 15 | #include <armnn/utility/PolymorphicDowncast.hpp> |
Francis Murtagh | e8d7ccb | 2021-10-14 17:30:24 +0100 | [diff] [blame] | 16 | #include <backendsCommon/DefaultAllocator.hpp> |
Cathal Corbett | 3883b27 | 2022-07-22 16:03:36 +0100 | [diff] [blame] | 17 | #include <backendsCommon/SubgraphUtils.hpp> |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 18 | |
David Beck | 263e349 | 2018-11-09 14:46:40 +0000 | [diff] [blame] | 19 | #include <Optimizer.hpp> |
arovir01 | a094479 | 2018-10-11 15:00:58 +0100 | [diff] [blame] | 20 | |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 21 | namespace armnn |
| 22 | { |
| 23 | |
David Beck | 3cc9a62 | 2018-10-12 10:38:31 +0100 | [diff] [blame] | 24 | const BackendId& RefBackend::GetIdStatic() |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 25 | { |
David Beck | 3e9e115 | 2018-10-17 14:17:50 +0100 | [diff] [blame] | 26 | static const BackendId s_Id{RefBackendId()}; |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 27 | return s_Id; |
| 28 | } |
| 29 | |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 30 | IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory( |
| 31 | const IBackendInternal::IMemoryManagerSharedPtr& memoryManager) const |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 32 | { |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 33 | return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager)); |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 34 | } |
| 35 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 36 | IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory( |
| 37 | class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const |
| 38 | { |
| 39 | auto memoryManager = std::make_shared<RefMemoryManager>(); |
| 40 | |
| 41 | tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager); |
Narumol Prangnawarat | 1c52a38 | 2022-01-13 11:47:35 +0000 | [diff] [blame] | 42 | |
| 43 | std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager); |
| 44 | // Register copy and import factory pair |
| 45 | tensorHandleFactoryRegistry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId()); |
| 46 | // Register the factory |
| 47 | tensorHandleFactoryRegistry.RegisterFactory(std::move(factory)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 48 | |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 49 | return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 50 | } |
| 51 | |
David Beck | 263e349 | 2018-11-09 14:46:40 +0000 | [diff] [blame] | 52 | IBackendInternal::IBackendContextPtr RefBackend::CreateBackendContext(const IRuntime::CreationOptions&) const |
| 53 | { |
| 54 | return IBackendContextPtr{}; |
| 55 | } |
| 56 | |
Colm Donelan | e49755b | 2020-01-29 15:22:43 +0000 | [diff] [blame] | 57 | IBackendInternal::IBackendProfilingContextPtr RefBackend::CreateBackendProfilingContext( |
Colm Donelan | 1aff393 | 2020-02-05 17:48:59 +0000 | [diff] [blame] | 58 | const IRuntime::CreationOptions&, IBackendProfilingPtr&) |
Colm Donelan | e49755b | 2020-01-29 15:22:43 +0000 | [diff] [blame] | 59 | { |
| 60 | return IBackendProfilingContextPtr{}; |
| 61 | } |
| 62 | |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 63 | IBackendInternal::IMemoryManagerUniquePtr RefBackend::CreateMemoryManager() const |
| 64 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 65 | return std::make_unique<RefMemoryManager>(); |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 66 | } |
| 67 | |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 68 | IBackendInternal::ILayerSupportSharedPtr RefBackend::GetLayerSupport() const |
| 69 | { |
| 70 | static ILayerSupportSharedPtr layerSupport{new RefLayerSupport}; |
| 71 | return layerSupport; |
| 72 | } |
| 73 | |
OptimizationViews RefBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                                                   const ModelOptions& modelOptions) const
{
    OptimizationViews optimizationViews(modelOptions);

    // First pass: record every layer of the subgraph as "untouched".
    // Entries are erased below whenever an optimization substitutes a layer.
    auto it = subgraph.endIConnectable();
    std::map<LayerGuid, Layer*> untouched;

    while (it != subgraph.beginIConnectable())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));
        untouched.insert({base.GetGuid(), &base});
    }

    // Second pass (also in reverse order): look for optimization opportunities.
    it = subgraph.endIConnectable();
    while (it != subgraph.beginIConnectable())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));

        // Special case to fuse padding into average pooling 2d for quantized datatype.
        // Required to be done as a backend specific optimization as Neon does not support this special case.
        if (base.GetType() == LayerType::Pooling2d)
        {
            Pooling2dLayer* baseLayer = PolymorphicDowncast<Pooling2dLayer*>(&base);
            Pooling2dDescriptor poolingDescriptor = baseLayer->GetParameters();

            // Only consider a Pad layer that directly feeds the pooling input.
            if (baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer().GetType() == LayerType::Pad)
            {
                PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(
                    &baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer());
                // The pad output must feed only this pooling layer, and the fold must be
                // legal for these parameters. NOTE(review): TryFoldPadIntoLayer2d appears to
                // update poolingDescriptor in place when it succeeds — confirm in its definition.
                if (padLayer->GetOutputSlot(0).GetNumConnections() == 1 &&
                    optimizations::pad_fold::TryFoldPadIntoLayer2d(padLayer->GetParameters(),
                                                                   poolingDescriptor,
                                                                   padLayer->GetOutputSlot().GetTensorInfo(),
                                                                   true))
                {
                    FoldPadIntoAveragePool2d<Pooling2dLayer>(optimizationViews, baseLayer,
                                                             poolingDescriptor, padLayer);
                    // Both layers are replaced by the fused substitution, so they are no
                    // longer "untouched".
                    untouched.erase(baseLayer->GetGuid());
                    untouched.erase(padLayer->GetGuid());
                }
            }
        }
    }

    // If nothing was substituted, report the whole subgraph back unmodified;
    // otherwise report only the layers that were not replaced.
    if (optimizationViews.GetSubstitutions().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    else
    {
        ReportUntouchedLayers(optimizationViews, untouched);
    }

    return optimizationViews;
}
| 132 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 133 | std::vector<ITensorHandleFactory::FactoryId> RefBackend::GetHandleFactoryPreferences() const |
| 134 | { |
| 135 | return std::vector<ITensorHandleFactory::FactoryId> { RefTensorHandleFactory::GetIdStatic() }; |
| 136 | } |
| 137 | |
| 138 | void RefBackend::RegisterTensorHandleFactories(class TensorHandleFactoryRegistry& registry) |
| 139 | { |
| 140 | auto memoryManager = std::make_shared<RefMemoryManager>(); |
| 141 | |
| 142 | registry.RegisterMemoryManager(memoryManager); |
Narumol Prangnawarat | 1c52a38 | 2022-01-13 11:47:35 +0000 | [diff] [blame] | 143 | |
| 144 | std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager); |
| 145 | |
| 146 | // Register copy and import factory pair |
| 147 | registry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId()); |
| 148 | // Register the factory |
| 149 | registry.RegisterFactory(std::move(factory)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 150 | } |
| 151 | |
Francis Murtagh | e8d7ccb | 2021-10-14 17:30:24 +0100 | [diff] [blame] | 152 | std::unique_ptr<ICustomAllocator> RefBackend::GetDefaultAllocator() const |
| 153 | { |
| 154 | return std::make_unique<DefaultAllocator>(); |
| 155 | } |
| 156 | |
Matthew Sloyan | 2d213a7 | 2022-06-30 17:13:04 +0100 | [diff] [blame] | 157 | ExecutionData RefBackend::CreateExecutionData(WorkingMemDescriptor& workingMemDescriptor) const |
| 158 | { |
| 159 | ExecutionData executionData; |
| 160 | executionData.m_Data = &workingMemDescriptor; |
| 161 | return executionData; |
| 162 | } |
| 163 | |
| 164 | void RefBackend::UpdateExecutionData(ExecutionData& executionData, WorkingMemDescriptor& workingMemDescriptor) const |
| 165 | { |
| 166 | executionData.m_Data = &workingMemDescriptor; |
| 167 | } |
| 168 | |
Matteo Martincigh | adddddb | 2019-01-24 14:06:23 +0000 | [diff] [blame] | 169 | } // namespace armnn |