arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 1 | // |
Mike Kelly | 4cc341c | 2023-07-07 15:43:06 +0100 | [diff] [blame] | 2 | // Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved. |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
| 6 | #include "RefBackend.hpp" |
David Beck | 3e9e115 | 2018-10-17 14:17:50 +0100 | [diff] [blame] | 7 | #include "RefBackendId.hpp" |
arovir01 | a094479 | 2018-10-11 15:00:58 +0100 | [diff] [blame] | 8 | #include "RefWorkloadFactory.hpp" |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 9 | #include "RefLayerSupport.hpp" |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 10 | #include "RefTensorHandleFactory.hpp" |
arovir01 | a094479 | 2018-10-11 15:00:58 +0100 | [diff] [blame] | 11 | |
Matteo Martincigh | c601aa6 | 2019-10-29 15:03:22 +0000 | [diff] [blame] | 12 | #include <armnn/BackendRegistry.hpp> |
Matteo Martincigh | e5b8eb9 | 2019-11-28 15:45:42 +0000 | [diff] [blame] | 13 | #include <armnn/backends/IBackendContext.hpp> |
| 14 | #include <armnn/backends/IMemoryManager.hpp> |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 15 | #include <armnn/utility/PolymorphicDowncast.hpp> |
Francis Murtagh | e8d7ccb | 2021-10-14 17:30:24 +0100 | [diff] [blame] | 16 | #include <backendsCommon/DefaultAllocator.hpp> |
Cathal Corbett | 3883b27 | 2022-07-22 16:03:36 +0100 | [diff] [blame] | 17 | #include <backendsCommon/SubgraphUtils.hpp> |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 18 | |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 19 | namespace armnn |
| 20 | { |
| 21 | |
David Beck | 3cc9a62 | 2018-10-12 10:38:31 +0100 | [diff] [blame] | 22 | const BackendId& RefBackend::GetIdStatic() |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 23 | { |
David Beck | 3e9e115 | 2018-10-17 14:17:50 +0100 | [diff] [blame] | 24 | static const BackendId s_Id{RefBackendId()}; |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 25 | return s_Id; |
| 26 | } |
| 27 | |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 28 | IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory( |
| 29 | const IBackendInternal::IMemoryManagerSharedPtr& memoryManager) const |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 30 | { |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 31 | return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager)); |
arovir01 | 4424b0a | 2018-10-04 10:46:04 +0100 | [diff] [blame] | 32 | } |
| 33 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 34 | IBackendInternal::IWorkloadFactoryPtr RefBackend::CreateWorkloadFactory( |
| 35 | class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const |
| 36 | { |
| 37 | auto memoryManager = std::make_shared<RefMemoryManager>(); |
| 38 | |
| 39 | tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager); |
Narumol Prangnawarat | 1c52a38 | 2022-01-13 11:47:35 +0000 | [diff] [blame] | 40 | |
| 41 | std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager); |
| 42 | // Register copy and import factory pair |
| 43 | tensorHandleFactoryRegistry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId()); |
| 44 | // Register the factory |
| 45 | tensorHandleFactoryRegistry.RegisterFactory(std::move(factory)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 46 | |
Jan Eilers | 3c9e045 | 2020-04-10 13:00:44 +0100 | [diff] [blame] | 47 | return std::make_unique<RefWorkloadFactory>(PolymorphicPointerDowncast<RefMemoryManager>(memoryManager)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 48 | } |
| 49 | |
David Beck | 263e349 | 2018-11-09 14:46:40 +0000 | [diff] [blame] | 50 | IBackendInternal::IBackendContextPtr RefBackend::CreateBackendContext(const IRuntime::CreationOptions&) const |
| 51 | { |
| 52 | return IBackendContextPtr{}; |
| 53 | } |
| 54 | |
Colm Donelan | e49755b | 2020-01-29 15:22:43 +0000 | [diff] [blame] | 55 | IBackendInternal::IBackendProfilingContextPtr RefBackend::CreateBackendProfilingContext( |
Colm Donelan | 1aff393 | 2020-02-05 17:48:59 +0000 | [diff] [blame] | 56 | const IRuntime::CreationOptions&, IBackendProfilingPtr&) |
Colm Donelan | e49755b | 2020-01-29 15:22:43 +0000 | [diff] [blame] | 57 | { |
| 58 | return IBackendProfilingContextPtr{}; |
| 59 | } |
| 60 | |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 61 | IBackendInternal::IMemoryManagerUniquePtr RefBackend::CreateMemoryManager() const |
| 62 | { |
Matthew Bentham | 7c1603a | 2019-06-21 17:22:23 +0100 | [diff] [blame] | 63 | return std::make_unique<RefMemoryManager>(); |
Aron Virginas-Tar | 5605519 | 2018-11-12 18:10:43 +0000 | [diff] [blame] | 64 | } |
| 65 | |
David Beck | 111b5d9 | 2018-11-12 14:59:37 +0000 | [diff] [blame] | 66 | IBackendInternal::ILayerSupportSharedPtr RefBackend::GetLayerSupport() const |
| 67 | { |
| 68 | static ILayerSupportSharedPtr layerSupport{new RefLayerSupport}; |
| 69 | return layerSupport; |
| 70 | } |
| 71 | |
// Applies reference-backend-specific graph optimizations to the given subgraph and
// returns the resulting substitutions/untouched views in an OptimizationViews object.
OptimizationViews RefBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
                                                   const ModelOptions& modelOptions) const
{
    OptimizationViews optimizationViews(modelOptions);

    auto it = subgraph.end();
    std::map<LayerGuid, Layer*> untouched;

    // First pass: record every layer of the subgraph, keyed by GUID. Layers that an
    // optimization below consumes are erased, leaving the set of untouched layers.
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));
        untouched.insert({base.GetGuid(), &base});
    }

    // Second pass: walk the subgraph backwards and apply the optimizations.
    it = subgraph.end();
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = *(PolymorphicDowncast<Layer*>(*it));

        // Special case to fuse padding into average pooling 2d for quantized datatype.
        // Required to be done as a backend specific optimization as Neon does not support this special case.
        if (base.GetType() == LayerType::Pooling2d)
        {
            Pooling2dLayer* baseLayer = PolymorphicDowncast<Pooling2dLayer*>(&base);
            Pooling2dDescriptor poolingDescriptor = baseLayer->GetParameters();

            // Only consider folding when the pooling layer is fed directly by a Pad layer.
            if (baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer().GetType() == LayerType::Pad)
            {
                PadLayer* padLayer = PolymorphicDowncast<PadLayer*>(
                    &baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetOwningLayer());
                // The pad output must feed only this pooling layer (single connection),
                // and TryFoldPadIntoLayer2d must accept the descriptor combination.
                if (padLayer->GetOutputSlot(0).GetNumConnections() == 1 &&
                    optimizations::pad_fold::TryFoldPadIntoLayer2d(padLayer->GetParameters(),
                                                                   poolingDescriptor,
                                                                   padLayer->GetOutputSlot().GetTensorInfo(),
                                                                   true))
                {
                    FoldPadIntoAveragePool2d<Pooling2dLayer>(optimizationViews, baseLayer,
                                                             poolingDescriptor, padLayer);
                    // Both fused layers are now replaced by the substitution.
                    untouched.erase(baseLayer->GetGuid());
                    untouched.erase(padLayer->GetGuid());
                }
            }
        }

        // Remove Reshape where possible
        if (base.GetType() == LayerType::Reshape)
        {
            ReshapeLayer* baseLayer = PolymorphicDowncast<ReshapeLayer*>(&base);
            // RemoveReshapeLayer updates optimizationViews and the untouched map itself.
            RemoveReshapeLayer(baseLayer, untouched, optimizationViews);
        }
    }

    // If nothing was substituted or deleted, report the whole subgraph as untouched;
    // otherwise report only the layers that no optimization consumed.
    if (optimizationViews.GetSubstitutions().empty() && optimizationViews.GetDeletedSubgraphs().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    else
    {
        ReportUntouchedLayers(optimizationViews, untouched);
    }

    return optimizationViews;
}
| 137 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 138 | std::vector<ITensorHandleFactory::FactoryId> RefBackend::GetHandleFactoryPreferences() const |
| 139 | { |
| 140 | return std::vector<ITensorHandleFactory::FactoryId> { RefTensorHandleFactory::GetIdStatic() }; |
| 141 | } |
| 142 | |
| 143 | void RefBackend::RegisterTensorHandleFactories(class TensorHandleFactoryRegistry& registry) |
| 144 | { |
| 145 | auto memoryManager = std::make_shared<RefMemoryManager>(); |
| 146 | |
| 147 | registry.RegisterMemoryManager(memoryManager); |
Narumol Prangnawarat | 1c52a38 | 2022-01-13 11:47:35 +0000 | [diff] [blame] | 148 | |
| 149 | std::unique_ptr<RefTensorHandleFactory> factory = std::make_unique<RefTensorHandleFactory>(memoryManager); |
| 150 | |
| 151 | // Register copy and import factory pair |
| 152 | registry.RegisterCopyAndImportFactoryPair(factory->GetId(), factory->GetId()); |
| 153 | // Register the factory |
| 154 | registry.RegisterFactory(std::move(factory)); |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 155 | } |
| 156 | |
Francis Murtagh | e8d7ccb | 2021-10-14 17:30:24 +0100 | [diff] [blame] | 157 | std::unique_ptr<ICustomAllocator> RefBackend::GetDefaultAllocator() const |
| 158 | { |
| 159 | return std::make_unique<DefaultAllocator>(); |
| 160 | } |
| 161 | |
Matthew Sloyan | 2d213a7 | 2022-06-30 17:13:04 +0100 | [diff] [blame] | 162 | ExecutionData RefBackend::CreateExecutionData(WorkingMemDescriptor& workingMemDescriptor) const |
| 163 | { |
| 164 | ExecutionData executionData; |
| 165 | executionData.m_Data = &workingMemDescriptor; |
| 166 | return executionData; |
| 167 | } |
| 168 | |
| 169 | void RefBackend::UpdateExecutionData(ExecutionData& executionData, WorkingMemDescriptor& workingMemDescriptor) const |
| 170 | { |
| 171 | executionData.m_Data = &workingMemDescriptor; |
| 172 | } |
| 173 | |
Matteo Martincigh | adddddb | 2019-01-24 14:06:23 +0000 | [diff] [blame] | 174 | } // namespace armnn |