//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "NeonBackend.hpp"
#include "NeonBackendId.hpp"
#include "NeonBackendModelContext.hpp"
#include "NeonWorkloadFactory.hpp"
#include "NeonLayerSupport.hpp"
#include "NeonTensorHandleFactory.hpp"

#include <armnn/BackendRegistry.hpp>
#include <armnn/Descriptors.hpp>

#include <aclCommon/ArmComputeSubgraphUtils.hpp>
#include <aclCommon/ArmComputeUtils.hpp>
#include <aclCommon/BaseMemoryManager.hpp>

#include <armnn/backends/IBackendContext.hpp>
#include <armnn/backends/IMemoryManager.hpp>

#include <armnn/utility/PolymorphicDowncast.hpp>

#include "workloads/NeonAdditionWorkload.hpp"
#include "workloads/NeonBatchNormalizationWorkload.hpp"
#include "workloads/NeonConvolution2dWorkload.hpp"
#include "workloads/NeonDepthwiseConvolutionWorkload.hpp"
#include "workloads/NeonDivisionWorkload.hpp"
#include "workloads/NeonFullyConnectedWorkload.hpp"
#include "workloads/NeonMultiplicationWorkload.hpp"
#include "workloads/NeonReduceWorkload.hpp"
#include "workloads/NeonSubtractionWorkload.hpp"

#include <Optimizer.hpp>

#include <arm_compute/core/Types.h>
#include <arm_compute/runtime/Allocator.h>

namespace armnn
{

const BackendId& NeonBackend::GetIdStatic()
{
    static const BackendId s_Id{NeonBackendId()};
    return s_Id;
}

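// Memory is managed through an arm_compute::Allocator with offset-based memory affinity.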
IBackendInternal::IMemoryManagerUniquePtr NeonBackend::CreateMemoryManager() const
{
    return std::make_unique<NeonMemoryManager>(std::make_unique<arm_compute::Allocator>(),
                                               BaseMemoryManager::MemoryAffinity::Offset);
}

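// Workload factory overloads: callers may supply an existing memory manager, a tensor handle
// factory registry, and/or backend-specific model options.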
IBackendInternal::IWorkloadFactoryPtr NeonBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& memoryManager) const
{
    return std::make_unique<NeonWorkloadFactory>(
        PolymorphicPointerDowncast<NeonMemoryManager>(memoryManager));
}

IBackendInternal::IWorkloadFactoryPtr NeonBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const ModelOptions& modelOptions) const
{
    return std::make_unique<NeonWorkloadFactory>(
        PolymorphicPointerDowncast<NeonMemoryManager>(memoryManager), CreateBackendSpecificModelContext(modelOptions));
}

IBackendInternal::IWorkloadFactoryPtr NeonBackend::CreateWorkloadFactory(
    class TensorHandleFactoryRegistry& tensorHandleFactoryRegistry) const
{
    auto memoryManager = std::make_shared<NeonMemoryManager>(std::make_unique<arm_compute::Allocator>(),
                                                             BaseMemoryManager::MemoryAffinity::Offset);

    tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager);
    tensorHandleFactoryRegistry.RegisterFactory(std::make_unique<NeonTensorHandleFactory>(memoryManager));

    return std::make_unique<NeonWorkloadFactory>(
        PolymorphicPointerDowncast<NeonMemoryManager>(memoryManager));
}

IBackendInternal::IWorkloadFactoryPtr NeonBackend::CreateWorkloadFactory(
    TensorHandleFactoryRegistry& tensorHandleFactoryRegistry, const ModelOptions& modelOptions) const
{
    auto memoryManager = std::make_shared<NeonMemoryManager>(std::make_unique<arm_compute::Allocator>(),
                                                             BaseMemoryManager::MemoryAffinity::Offset);

    tensorHandleFactoryRegistry.RegisterMemoryManager(memoryManager);
    tensorHandleFactoryRegistry.RegisterFactory(std::make_unique<NeonTensorHandleFactory>(memoryManager));

    return std::make_unique<NeonWorkloadFactory>(
        PolymorphicPointerDowncast<NeonMemoryManager>(memoryManager), CreateBackendSpecificModelContext(modelOptions));
}

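// No backend-specific runtime context or profiling context is required; empty pointers are returned.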
IBackendInternal::IBackendContextPtr NeonBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}

IBackendInternal::IBackendProfilingContextPtr NeonBackend::CreateBackendProfilingContext(
    const IRuntime::CreationOptions&, IBackendProfilingPtr&)
{
    return IBackendProfilingContextPtr{};
}

IBackendInternal::Optimizations NeonBackend::GetOptimizations() const
{
    return Optimizations{};
}

IBackendInternal::IBackendSpecificModelContextPtr NeonBackend::CreateBackendSpecificModelContext(
    const ModelOptions& modelOptions) const
{
    return IBackendSpecificModelContextPtr{new NeonBackendModelContext{modelOptions}};
}

IBackendInternal::ILayerSupportSharedPtr NeonBackend::GetLayerSupport() const
{
    static ILayerSupportSharedPtr layerSupport
    {
        new NeonLayerSupport(IBackendInternal::IBackendSpecificModelContextPtr{})
    };
    return layerSupport;
}

IBackendInternal::ILayerSupportSharedPtr NeonBackend::GetLayerSupport(const ModelOptions& modelOptions) const
{
    static ILayerSupportSharedPtr layerSupport
    {
        new NeonLayerSupport(CreateBackendSpecificModelContext(modelOptions))
    };
    return layerSupport;
}

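// Backend-specific subgraph optimizations: fuse supported Activation layers into the preceding
// layer and split multi-axis Reduce layers into chains of single-axis reductions.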
OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph) const
{
    OptimizationViews optimizationViews;

    auto it = subgraph.end();
    std::map<LayerGuid, Layer*> untouched;

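    // First pass: record every layer in the subgraph. Entries are erased as layers are fused,
    // so whatever remains can be reported back as untouched.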
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = **it;
        untouched.insert({base.GetGuid(), &base});
    }

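    // Second pass: walk the subgraph backwards looking for optimization opportunities.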
    it = subgraph.end();
    while (it != subgraph.begin())
    {
        --it;
        Layer& base = **it;

        // Fuse activation into previous layer if supported by backend
        if ((base.GetType() == LayerType::DepthwiseConvolution2d || base.GetType() == LayerType::Convolution2d
             || base.GetType() == LayerType::BatchNormalization || base.GetType() == LayerType::FullyConnected
             || base.GetType() == LayerType::Addition || base.GetType() == LayerType::Multiplication
             || base.GetType() == LayerType::Subtraction || base.GetType() == LayerType::Division)
            && (base.GetAdditionalInformation<ActivationDescriptor>() == nullptr))
        {
            for (auto output = base.BeginOutputSlots(); output != base.EndOutputSlots(); ++output)
            {
                if (output->GetNumConnections() == 1)
                {
                    for (auto&& childInput : output->GetConnections())
                    {
                        if ((childInput->GetOwningLayer().GetType() == LayerType::Activation) &&
                            (checkDataTypeInputandOutput(childInput->GetOwningLayer())))
                        {
                            Layer& child = childInput->GetOwningLayer();

                            auto* activationLayer = PolymorphicDowncast<ActivationLayer*>(&child);

                            const std::string name = std::string("fused-") + child.GetName() + std::string("-into-") +
                                                     base.GetName();

                            // Get params from activation layer
                            ActivationDescriptor activationDesc = activationLayer->GetParameters();

                            if (base.GetType() == LayerType::Convolution2d)
                            {
                                Convolution2dLayer* baseLayer = PolymorphicDowncast<Convolution2dLayer*>(&base);

                                Optional<TensorInfo> biases;

                                if (baseLayer->GetParameters().m_BiasEnabled)
                                {
                                    biases = baseLayer->m_Bias->GetTensorInfo();
                                }

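                                // Only fuse if the Neon workload validates the configuration
                                // with the activation folded in.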
                                arm_compute::Status status = NeonConvolution2dWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetParameters(),
                                        baseLayer->m_Weight->GetTensorInfo(),
                                        biases,
                                        false,
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithWeightsAndBiases<Convolution2dLayer>(optimizationViews,
                                                                                      baseLayer,
                                                                                      activationLayer,
                                                                                      activationDesc,
                                                                                      name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::DepthwiseConvolution2d)
                            {
                                DepthwiseConvolution2dLayer* baseLayer =
                                    PolymorphicDowncast<DepthwiseConvolution2dLayer*>(&base);

                                Optional<TensorInfo> biases;

                                if (baseLayer->GetParameters().m_BiasEnabled)
                                {
                                    biases = baseLayer->m_Bias->GetTensorInfo();
                                }

                                arm_compute::Status status = NeonDepthwiseConvolutionWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetParameters(),
                                        baseLayer->m_Weight->GetTensorInfo(),
                                        biases,
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithWeightsAndBiases<DepthwiseConvolution2dLayer>(optimizationViews,
                                                                                               baseLayer,
                                                                                               activationLayer,
                                                                                               activationDesc,
                                                                                               name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::FullyConnected)
                            {
                                FullyConnectedLayer* baseLayer = PolymorphicDowncast<FullyConnectedLayer*>(&base);

                                arm_compute::Status status = NeonFullyConnectedWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->m_Weight->GetTensorInfo(),
                                        baseLayer->m_Bias->GetTensorInfo(),
                                        baseLayer->GetParameters(),
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithWeightsAndBiases<FullyConnectedLayer>(optimizationViews,
                                                                                       baseLayer,
                                                                                       activationLayer,
                                                                                       activationDesc,
                                                                                       name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::BatchNormalization)
                            {
                                BatchNormalizationLayer* baseLayer =
                                    PolymorphicDowncast<BatchNormalizationLayer*>(&base);

                                arm_compute::Status status = NeonBatchNormalizationValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->m_Mean->GetTensorInfo(),
                                        baseLayer->m_Variance->GetTensorInfo(),
                                        baseLayer->m_Beta->GetTensorInfo(),
                                        baseLayer->m_Gamma->GetTensorInfo(),
                                        baseLayer->GetParameters(),
                                        &activationDesc);

                                if (status)
                                {
                                    BatchNormalizationLayer* replacementLayer =
                                        FuseLayerWithParameters<BatchNormalizationLayer>(
                                            optimizationViews,
                                            baseLayer,
                                            activationLayer,
                                            activationDesc,
                                            name);

                                    replacementLayer->m_Beta = std::move(baseLayer->m_Beta);
                                    replacementLayer->m_Gamma = std::move(baseLayer->m_Gamma);
                                    replacementLayer->m_Mean = std::move(baseLayer->m_Mean);
                                    replacementLayer->m_Variance = std::move(baseLayer->m_Variance);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::Addition)
                            {
                                AdditionLayer* baseLayer = PolymorphicDowncast<AdditionLayer*>(&base);

                                arm_compute::Status status = NeonAdditionWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetInputSlot(1).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithoutParameters<AdditionLayer>(optimizationViews,
                                                                              baseLayer,
                                                                              activationLayer,
                                                                              activationDesc,
                                                                              name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::Division)
                            {
                                DivisionLayer* baseLayer = PolymorphicDowncast<DivisionLayer*>(&base);

                                arm_compute::Status status = NeonDivisionWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetInputSlot(1).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithoutParameters<DivisionLayer>(optimizationViews,
                                                                              baseLayer,
                                                                              activationLayer,
                                                                              activationDesc,
                                                                              name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::Multiplication)
                            {
                                MultiplicationLayer* baseLayer = PolymorphicDowncast<MultiplicationLayer*>(&base);

                                arm_compute::Status status = NeonMultiplicationWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetInputSlot(1).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithoutParameters<MultiplicationLayer>(optimizationViews,
                                                                                    baseLayer,
                                                                                    activationLayer,
                                                                                    activationDesc,
                                                                                    name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                            else if (base.GetType() == LayerType::Subtraction)
                            {
                                SubtractionLayer* baseLayer = PolymorphicDowncast<SubtractionLayer*>(&base);

                                arm_compute::Status status = NeonSubtractionWorkloadValidate(
                                        baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        baseLayer->GetInputSlot(1).GetConnectedOutputSlot()->GetTensorInfo(),
                                        activationLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo(),
                                        &activationDesc);

                                if (status)
                                {
                                    FuseLayerWithoutParameters<SubtractionLayer>(optimizationViews,
                                                                                 baseLayer,
                                                                                 activationLayer,
                                                                                 activationDesc,
                                                                                 name);
                                    untouched.erase(baseLayer->GetGuid());
                                    untouched.erase(activationLayer->GetGuid());
                                }
                            }
                        }
                    }
                }
            }
        }

        // Separate reduce layer with multiple axes into multiple reduce layers with 1 axis.
        if (base.GetType() == LayerType::Reduce)
        {
            ReduceLayer* baseLayer = PolymorphicDowncast<ReduceLayer*>(&base);
            ReduceDescriptor reduceDescriptor = baseLayer->GetParameters();

            if (!reduceDescriptor.m_vAxis.empty() && reduceDescriptor.m_vAxis.size() > 1)
            {
                // Add new layers to the graph and connect them.
                std::vector<Layer*> layers = ChainReduceLayers<ReduceLayer>(optimizationViews,
                                                                            baseLayer,
                                                                            reduceDescriptor);

                // Replace existing baselayer with new subgraph.
                ReplaceLayers<ReduceLayer>(optimizationViews, baseLayer, layers);
                untouched.erase(baseLayer->GetGuid());
            }
        }
    }

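    // If nothing was substituted, hand the whole subgraph back unchanged; otherwise report
    // the layers that were not part of any substitution.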
    if (optimizationViews.GetSubstitutions().empty())
    {
        optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
    }
    else
    {
        ReportUntouchedLayers(optimizationViews, untouched);
    }

    return optimizationViews;
}

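// The Neon tensor handle factory is the only handle factory this backend offers.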
std::vector<ITensorHandleFactory::FactoryId> NeonBackend::GetHandleFactoryPreferences() const
{
    return std::vector<ITensorHandleFactory::FactoryId>{ NeonTensorHandleFactory::GetIdStatic() };
}

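// Register the Neon memory manager and tensor handle factory with the given registry.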
void NeonBackend::RegisterTensorHandleFactories(class TensorHandleFactoryRegistry& registry)
{
    auto memoryManager = std::make_shared<NeonMemoryManager>(std::make_unique<arm_compute::Allocator>(),
                                                             BaseMemoryManager::MemoryAffinity::Offset);

    registry.RegisterMemoryManager(memoryManager);
    registry.RegisterFactory(std::make_unique<NeonTensorHandleFactory>(memoryManager));
}

} // namespace armnn