Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 1 | // |
| 2 | // Copyright © 2017 Arm Ltd. All rights reserved. |
| 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | |
#include "NeonTensorHandleFactory.hpp"
#include "NeonTensorHandle.hpp"

#include "Layer.hpp"

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 13 | |
| 14 | namespace armnn |
| 15 | { |
| 16 | |
Jan Eilers | e9f0f0f | 2019-08-16 10:28:37 +0100 | [diff] [blame] | 17 | using FactoryId = ITensorHandleFactory::FactoryId; |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 18 | |
| 19 | std::unique_ptr<ITensorHandle> NeonTensorHandleFactory::CreateSubTensorHandle(ITensorHandle& parent, |
Jan Eilers | e9f0f0f | 2019-08-16 10:28:37 +0100 | [diff] [blame] | 20 | const TensorShape& subTensorShape, |
| 21 | const unsigned int* subTensorOrigin) |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 22 | const |
| 23 | { |
| 24 | const arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape); |
| 25 | |
| 26 | arm_compute::Coordinates coords; |
| 27 | coords.set_num_dimensions(subTensorShape.GetNumDimensions()); |
Jan Eilers | e9f0f0f | 2019-08-16 10:28:37 +0100 | [diff] [blame] | 28 | for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); ++i) |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 29 | { |
| 30 | // Arm compute indexes tensor coords in reverse order. |
| 31 | unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1; |
| 32 | coords.set(i, boost::numeric_cast<int>(subTensorOrigin[revertedIndex])); |
| 33 | } |
| 34 | |
| 35 | const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape()); |
David Monahan | 49895f4 | 2020-07-21 11:16:51 +0100 | [diff] [blame] | 36 | |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 37 | if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape)) |
| 38 | { |
| 39 | return nullptr; |
| 40 | } |
| 41 | |
| 42 | return std::make_unique<NeonSubTensorHandle>( |
Jan Eilers | bb446e5 | 2020-04-02 13:56:54 +0100 | [diff] [blame] | 43 | PolymorphicDowncast<IAclTensorHandle*>(&parent), shape, coords); |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 44 | } |
| 45 | |
David Monahan | c6e5a6e | 2019-10-02 09:33:57 +0100 | [diff] [blame] | 46 | std::unique_ptr<ITensorHandle> NeonTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo) const |
| 47 | { |
| 48 | return NeonTensorHandleFactory::CreateTensorHandle(tensorInfo, true); |
| 49 | } |
| 50 | |
| 51 | std::unique_ptr<ITensorHandle> NeonTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo, |
| 52 | DataLayout dataLayout) const |
| 53 | { |
| 54 | return NeonTensorHandleFactory::CreateTensorHandle(tensorInfo, dataLayout, true); |
| 55 | } |
| 56 | |
David Monahan | 3fb7e10 | 2019-08-20 11:25:29 +0100 | [diff] [blame] | 57 | std::unique_ptr<ITensorHandle> NeonTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo, |
| 58 | const bool IsMemoryManaged) const |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 59 | { |
| 60 | auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo); |
David Monahan | 3fb7e10 | 2019-08-20 11:25:29 +0100 | [diff] [blame] | 61 | if (IsMemoryManaged) |
| 62 | { |
| 63 | tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup()); |
| 64 | } |
| 65 | // If we are not Managing the Memory then we must be importing |
| 66 | tensorHandle->SetImportEnabledFlag(!IsMemoryManaged); |
James Conroy | 57d10b7 | 2019-10-25 09:44:14 +0100 | [diff] [blame] | 67 | tensorHandle->SetImportFlags(GetImportFlags()); |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 68 | |
| 69 | return tensorHandle; |
| 70 | } |
| 71 | |
| 72 | std::unique_ptr<ITensorHandle> NeonTensorHandleFactory::CreateTensorHandle(const TensorInfo& tensorInfo, |
David Monahan | 3fb7e10 | 2019-08-20 11:25:29 +0100 | [diff] [blame] | 73 | DataLayout dataLayout, |
| 74 | const bool IsMemoryManaged) const |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 75 | { |
| 76 | auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo, dataLayout); |
David Monahan | 3fb7e10 | 2019-08-20 11:25:29 +0100 | [diff] [blame] | 77 | if (IsMemoryManaged) |
| 78 | { |
| 79 | tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup()); |
| 80 | } |
| 81 | // If we are not Managing the Memory then we must be importing |
| 82 | tensorHandle->SetImportEnabledFlag(!IsMemoryManaged); |
James Conroy | 57d10b7 | 2019-10-25 09:44:14 +0100 | [diff] [blame] | 83 | tensorHandle->SetImportFlags(GetImportFlags()); |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 84 | |
| 85 | return tensorHandle; |
| 86 | } |
| 87 | |
Jan Eilers | e9f0f0f | 2019-08-16 10:28:37 +0100 | [diff] [blame] | 88 | const FactoryId& NeonTensorHandleFactory::GetIdStatic() |
| 89 | { |
| 90 | static const FactoryId s_Id(NeonTensorHandleFactoryId()); |
| 91 | return s_Id; |
| 92 | } |
| 93 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 94 | const FactoryId& NeonTensorHandleFactory::GetId() const |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 95 | { |
Jan Eilers | e9f0f0f | 2019-08-16 10:28:37 +0100 | [diff] [blame] | 96 | return GetIdStatic(); |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 97 | } |
| 98 | |
| 99 | bool NeonTensorHandleFactory::SupportsSubTensors() const |
| 100 | { |
| 101 | return true; |
| 102 | } |
| 103 | |
| 104 | MemorySourceFlags NeonTensorHandleFactory::GetExportFlags() const |
| 105 | { |
James Conroy | 57d10b7 | 2019-10-25 09:44:14 +0100 | [diff] [blame] | 106 | return 0; |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 107 | } |
| 108 | |
| 109 | MemorySourceFlags NeonTensorHandleFactory::GetImportFlags() const |
| 110 | { |
James Conroy | ffab16f | 2019-11-07 14:37:09 +0000 | [diff] [blame] | 111 | return 0; |
Narumol Prangnawarat | 4e3e818 | 2019-08-14 12:25:50 +0100 | [diff] [blame] | 112 | } |
| 113 | |
Narumol Prangnawarat | 1a26896 | 2020-07-27 15:52:13 +0100 | [diff] [blame] | 114 | std::vector<Capability> NeonTensorHandleFactory::GetCapabilities(const IConnectableLayer* layer, |
| 115 | const IConnectableLayer* connectedLayer, |
| 116 | CapabilityClass capabilityClass) |
| 117 | |
| 118 | { |
| 119 | IgnoreUnused(connectedLayer); |
| 120 | std::vector<Capability> capabilities; |
| 121 | if (capabilityClass == CapabilityClass::PaddingRequired) |
| 122 | { |
| 123 | auto search = paddingRequiredLayers.find((PolymorphicDowncast<const Layer*>(layer))->GetType()); |
| 124 | if ( search != paddingRequiredLayers.end()) |
| 125 | { |
| 126 | Capability paddingCapability(CapabilityClass::PaddingRequired, true); |
| 127 | capabilities.push_back(paddingCapability); |
| 128 | } |
| 129 | } |
| 130 | return capabilities; |
| 131 | } |
| 132 | |
Ferran Balaguer | bfeb271 | 2019-08-07 15:14:56 +0100 | [diff] [blame] | 133 | } // namespace armnn |