surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 1 | // |
Declan-ARM | 7c75e33 | 2024-03-12 16:40:25 +0000 | [diff] [blame] | 2 | // Copyright © 2017-2024 Arm Ltd and Contributors. All rights reserved. |
David Beck | ecb56cd | 2018-09-05 12:52:57 +0100 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 4 | // |
| 5 | #include "SplitterLayer.hpp" |
| 6 | |
| 7 | #include "LayerCloneBase.hpp" |
| 8 | |
| 9 | #include <armnn/TypesUtils.hpp> |
Colm Donelan | 0c47974 | 2021-12-10 12:43:54 +0000 | [diff] [blame] | 10 | #include <armnn/backends/WorkloadData.hpp> |
| 11 | #include <armnn/backends/WorkloadFactory.hpp> |
Teresa Charlin | 7db7089 | 2024-04-23 13:43:03 +0100 | [diff] [blame^] | 12 | #include <backendsCommon/WorkloadUtils.hpp> |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 13 | |
| 14 | namespace armnn |
| 15 | { |
| 16 | |
| 17 | SplitterLayer::SplitterLayer(const ViewsDescriptor& param, const char* name) |
| 18 | : LayerWithParameters(1, param.GetNumViews(), LayerType::Splitter, param, name) |
| 19 | { |
| 20 | } |
| 21 | |
Derek Lamberti | 94a88d2 | 2019-12-10 21:12:59 +0000 | [diff] [blame] | 22 | std::unique_ptr<IWorkload> SplitterLayer::CreateWorkload(const IWorkloadFactory& factory) const |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 23 | { |
| 24 | SplitterQueueDescriptor descriptor; |
| 25 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 26 | // Copies the window origins to the descriptor. |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 27 | for (unsigned int i = 0; i < m_Param.GetNumViews(); ++i) |
| 28 | { |
| 29 | descriptor.m_ViewOrigins.emplace_back( |
| 30 | std::vector<unsigned int>(m_Param.GetViewOrigin(i), m_Param.GetViewOrigin(i) + m_Param.GetNumDimensions())); |
| 31 | } |
| 32 | |
Keith Davis | df04d23 | 2020-10-23 17:20:05 +0100 | [diff] [blame] | 33 | SetAdditionalInfo(descriptor); |
| 34 | |
Teresa Charlin | 611c7fb | 2022-01-07 09:47:29 +0000 | [diff] [blame] | 35 | return factory.CreateWorkload(LayerType::Splitter, descriptor, PrepInfoAndDesc(descriptor)); |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 36 | } |
| 37 | |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 38 | template<typename FactoryType> |
Narumol Prangnawarat | ef6f300 | 2020-08-17 17:02:12 +0100 | [diff] [blame] | 39 | void SplitterLayer::CreateTensors(const TensorHandleFactoryRegistry& registry, |
| 40 | const FactoryType& factory, |
| 41 | bool isMemoryManaged) |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 42 | { |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 43 | //If sub tensors are supported than all the "splitter" need to do is to |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 44 | //set the outputs to be appropriate sub tensors of the input. |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 45 | bool useSubTensors = factory.SupportsSubTensors(); |
| 46 | |
| 47 | if (useSubTensors) |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 48 | { |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 49 | // Get outputHandler of previous layer |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 50 | const OutputHandler& outputHandler = GetInputSlots()[0].GetConnectedOutputSlot()->GetOutputHandler(); |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 51 | const OutputSlot* slot = GetInputSlots()[0].GetConnectedOutputSlot(); |
Mike Kelly | 7b89992 | 2023-07-17 14:17:52 +0100 | [diff] [blame] | 52 | const TensorInfo& parentInfo = GetInputSlot(0).GetTensorInfo(); |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 53 | |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 54 | ITensorHandle* inputData = outputHandler.GetData(); |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 55 | |
| 56 | std::vector<std::unique_ptr<ITensorHandle>> subTensors; |
| 57 | |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 58 | // check if split is along the x or y (2 innermost dimensions) |
| 59 | auto numberOfDimensions = m_Param.GetNumDimensions(); |
| 60 | |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 61 | std::set<unsigned int> axis = ComputeSplitAxis(m_Param, parentInfo.GetShape()); |
| 62 | std::set<unsigned int>::iterator axisIt = axis.begin(); |
| 63 | |
| 64 | bool isOnXorY = m_Param.GetNumDimensions() >= 3 && |
| 65 | ((*axisIt == numberOfDimensions - 1) || |
| 66 | (*axisIt == numberOfDimensions - 2)); |
| 67 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 68 | //Creates the outputs as subtensors of the input. |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 69 | for (unsigned int i = 0; i < m_Param.GetNumViews(); ++i) |
| 70 | { |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 71 | const TensorInfo& info = m_OutputHandlers[i].GetTensorInfo(); |
| 72 | |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 73 | OutputSlot& outSlot = GetOutputSlot(i); |
| 74 | ITensorHandleFactory::FactoryId factoryId = outSlot.GetTensorHandleFactoryId(); |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 75 | |
| 76 | const unsigned int numOutputSlots = GetNumOutputSlots(); |
| 77 | |
| 78 | // if split along x or y (2 innermost dimensions) and the next layers do not require padding |
| 79 | bool canUseSubTensorOnXorY = true; |
| 80 | bool isTensorHandleFactory = std::is_same<armnn::ITensorHandleFactory, FactoryType>::value; |
| 81 | if (isTensorHandleFactory) |
| 82 | { |
| 83 | for (unsigned int it = 0; it < numOutputSlots; ++it) |
| 84 | { |
| 85 | InputSlot* inputSlot = GetOutputSlot(it).GetConnection(0); |
| 86 | ITensorHandleFactory* handleFactory = registry.GetFactory(factoryId); |
| 87 | std::vector<Capability> capabilities = |
| 88 | handleFactory->GetCapabilities(&(inputSlot->GetOwningLayer()), |
| 89 | this, |
| 90 | CapabilityClass::PaddingRequired); |
| 91 | if (isOnXorY) |
| 92 | { |
| 93 | canUseSubTensorOnXorY = false; |
| 94 | if (capabilities.empty()) |
| 95 | { |
| 96 | canUseSubTensorOnXorY = true; |
| 97 | } |
| 98 | } |
| 99 | |
| 100 | if (!canUseSubTensorOnXorY) |
| 101 | { |
| 102 | break; |
| 103 | } |
| 104 | } |
| 105 | } |
| 106 | |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 107 | auto CreateSubTensor = [&]() |
| 108 | { |
Keith Davis | 3674f14 | 2020-08-16 23:44:15 +0100 | [diff] [blame] | 109 | // Make sure: |
| 110 | // 1) quantization parameters are in the same space |
| 111 | // 2) the same TensorHandleFactory is used for input and split layer output |
| 112 | // 3) the output does not go to a Constant layer or input layer |
| 113 | // 4) if split along x or y (2 innermost dimensions) and the next layers do not require padding |
| 114 | if (parentInfo.IsTypeSpaceMatch(info) && //(1) |
| 115 | factoryId == slot->GetTensorHandleFactoryId() && //(2) |
| 116 | GetOutputSlot(i).GetConnection(0)->GetOwningLayer().GetType() != LayerType::Constant && //(3) |
| 117 | GetOutputSlot(i).GetConnection(0)->GetOwningLayer().GetType() != LayerType::Input && //(3) |
Mike Kelly | a638f10 | 2023-07-24 17:42:47 +0100 | [diff] [blame] | 118 | canUseSubTensorOnXorY) //(4) |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 119 | { |
Teresa Charlin | ec01fb7 | 2020-08-16 23:40:14 +0100 | [diff] [blame] | 120 | ARMNN_NO_DEPRECATE_WARN_BEGIN |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 121 | return factory.CreateSubTensorHandle(*inputData, |
| 122 | info.GetShape(), |
| 123 | this->m_Param.GetViewOrigin(i)); |
Teresa Charlin | ec01fb7 | 2020-08-16 23:40:14 +0100 | [diff] [blame] | 124 | ARMNN_NO_DEPRECATE_WARN_END |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 125 | } |
| 126 | return std::unique_ptr<ITensorHandle>(); |
| 127 | }; |
| 128 | |
| 129 | auto subTensor = CreateSubTensor(); |
| 130 | if (!subTensor) |
| 131 | { |
| 132 | useSubTensors = false; |
| 133 | break; //Failed to create a valid sub-tensor, so stop trying with the rest of the views. |
| 134 | } |
| 135 | subTensors.push_back(std::move(subTensor)); |
| 136 | } |
| 137 | |
| 138 | if (useSubTensors) |
| 139 | { |
| 140 | unsigned int i = 0; |
| 141 | for (auto& subTensor : subTensors) |
| 142 | { |
| 143 | m_OutputHandlers[i].SetData(std::move(subTensor)); |
| 144 | ++i; |
| 145 | } |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 146 | } |
| 147 | } |
Narumol Prangnawarat | 15eb583 | 2019-05-20 15:31:05 +0100 | [diff] [blame] | 148 | |
| 149 | if (!useSubTensors) |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 150 | { |
| 151 | for (unsigned int i = 0; i < m_Param.GetNumViews(); ++i) |
| 152 | { |
Narumol Prangnawarat | ef6f300 | 2020-08-17 17:02:12 +0100 | [diff] [blame] | 153 | m_OutputHandlers[i].CreateTensorHandles(factory, isMemoryManaged); |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 154 | } |
| 155 | } |
| 156 | } |
| 157 | |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 158 | void SplitterLayer::CreateTensorHandles(const TensorHandleFactoryRegistry& registry, |
David Monahan | 3fb7e10 | 2019-08-20 11:25:29 +0100 | [diff] [blame] | 159 | const IWorkloadFactory& workloadFactory, |
Narumol Prangnawarat | e5f0b24 | 2021-05-07 17:52:36 +0100 | [diff] [blame] | 160 | const bool isMemoryManaged) |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 161 | { |
| 162 | OutputSlot& slot = GetOutputSlot(0); |
| 163 | ITensorHandleFactory::FactoryId factoryId = slot.GetTensorHandleFactoryId(); |
| 164 | |
| 165 | if (factoryId == ITensorHandleFactory::LegacyFactoryId) |
| 166 | { |
Narumol Prangnawarat | ef6f300 | 2020-08-17 17:02:12 +0100 | [diff] [blame] | 167 | CreateTensors(registry, workloadFactory, isMemoryManaged); |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 168 | } |
| 169 | else |
| 170 | { |
Narumol Prangnawarat | e5f0b24 | 2021-05-07 17:52:36 +0100 | [diff] [blame] | 171 | ITensorHandleFactory* handleFactory = registry.GetFactory(factoryId); |
Declan-ARM | 7c75e33 | 2024-03-12 16:40:25 +0000 | [diff] [blame] | 172 | if (!handleFactory) |
| 173 | { |
| 174 | throw armnn::NullPointerException("handleFactory is returning a nullptr."); |
| 175 | } |
Narumol Prangnawarat | ef6f300 | 2020-08-17 17:02:12 +0100 | [diff] [blame] | 176 | CreateTensors(registry, *handleFactory, isMemoryManaged); |
Derek Lamberti | 84da38b | 2019-06-13 11:40:08 +0100 | [diff] [blame] | 177 | } |
| 178 | } |
| 179 | |
/// Creates a copy of this layer in the given graph, duplicating the
/// ViewsDescriptor and the layer name.
SplitterLayer* SplitterLayer::Clone(Graph& graph) const
{
    return CloneBase<SplitterLayer>(graph, m_Param, GetName());
}
| 184 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 185 | std::vector<TensorShape> SplitterLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 186 | { |
Declan-ARM | 7c75e33 | 2024-03-12 16:40:25 +0000 | [diff] [blame] | 187 | if (inputShapes.size() != m_Param.GetNumViews()) |
| 188 | { |
| 189 | throw armnn::Exception("inputShapes' and m_NumViews' sizes do not match (\"" |
| 190 | + std::to_string(inputShapes.size()) + |
| 191 | "\" vs \"" |
| 192 | + std::to_string(m_Param.GetNumViews()) + "\")"); |
| 193 | } |
| 194 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 195 | std::vector<TensorShape> outShapes; |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 196 | //Output shapes must match View shapes. |
| 197 | for (unsigned int viewIdx = 0; viewIdx < m_Param.GetNumViews(); viewIdx++) |
| 198 | { |
| 199 | const uint32_t* sizes = m_Param.GetViewSizes(viewIdx); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 200 | outShapes.push_back(TensorShape(m_Param.GetNumDimensions(), sizes)); |
| 201 | } |
| 202 | return outShapes; |
| 203 | } |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 204 | |
/// Validates that each output slot's tensor shape matches the shape implied by the
/// descriptor's view sizes, honouring the configured shape inference method.
/// @throws armnn::LayerValidationException if the inferred shape count does not
///         match the number of views, or (via ValidateAndCopyShape) on mismatch.
void SplitterLayer::ValidateTensorShapesFromInputs()
{
    // Check every output slot's current shape against the shape inference policy
    // before attempting validation.
    std::for_each(BeginOutputSlots(), EndOutputSlots(), [&](OutputSlot& outputSlot)
    {
        VerifyShapeInferenceType(outputSlot.GetTensorInfo().GetShape(), m_ShapeInferenceMethod);
    });

    // Build one TensorShape per view from the descriptor's view sizes.
    std::vector<TensorShape> views;
    for (unsigned int viewIdx = 0; viewIdx < m_Param.GetNumViews(); viewIdx++)
    {
        const uint32_t* sizes = m_Param.GetViewSizes(viewIdx);
        views.push_back(TensorShape(m_Param.GetNumDimensions(), sizes));
    }

    auto inferredShapes = InferOutputShapes(views);

    if (inferredShapes.size() != m_Param.GetNumViews())
    {
        throw armnn::LayerValidationException("inferredShapes' size and m_NumViews do not match (\""
                                              + std::to_string(inferredShapes.size()) +
                                              "\" vs \""
                                              + std::to_string(m_Param.GetNumViews()) + "\")");
    }

    // Validate each view's shape against the corresponding output slot (and copy the
    // inferred shape onto the slot when the inference method allows it).
    for (unsigned int viewIdx = 0; viewIdx < m_Param.GetNumViews(); viewIdx++)
    {
        ValidateAndCopyShape(GetOutputSlot(viewIdx).GetTensorInfo().GetShape(),
                             inferredShapes[viewIdx],
                             m_ShapeInferenceMethod,
                             "SplitterLayer",
                             viewIdx);
    }
}
| 238 | |
/// Visitor hook: forwards this layer, its ViewsDescriptor and its name to the
/// strategy. The empty initializer list is the (absent) constant-tensor set —
/// Splitter carries no weights or biases.
void SplitterLayer::ExecuteStrategy(IStrategy& strategy) const
{
    strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
}
| 243 | |
surmeh01 | 3537c2c | 2018-05-18 16:31:43 +0100 | [diff] [blame] | 244 | } // namespace armnn |