//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "NeonBackendId.hpp"
#include "NeonBackendModelContext.hpp"
#include "NeonTensorHandle.hpp"
#include "NeonWorkloadFactory.hpp"

#include <Layer.hpp>

#include <armnn/Utils.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>

#include <neon/workloads/NeonWorkloadUtils.hpp>
#include <neon/workloads/NeonWorkloads.hpp>

namespace armnn
{

namespace
{
static const BackendId s_Id{NeonBackendId()};
}

bool NeonWorkloadFactory::IsLayerSupported(const Layer& layer,
                                           Optional<DataType> dataType,
                                           std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool NeonWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                           Optional<DataType> dataType,
                                           std::string& outReasonIfUnsupported,
                                           const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

const BackendId& NeonWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
}

NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager,
                                         const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
}

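// Creates a handle that aliases a sub-region of the parent tensor. The origin is converted to
// Arm Compute coordinates (which are stored in reverse dimension order) and nullptr is returned
// when the requested sub-tensor is not valid for the parent shape.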
std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                          TensorShape const& subTensorShape,
                                                                          unsigned int const* subTensorOrigin) const
{
    const arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    arm_compute::Coordinates coords;
    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<NeonSubTensorHandle>(
        PolymorphicDowncast<IAclTensorHandle*>(&parent), shape, coords);
}

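// Tensor handles are optionally registered with the inter-layer memory group so that the Neon
// memory manager can manage (and reuse) their allocations across layers.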
std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                       const bool IsMemoryManaged) const
{
    auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo);
    if (IsMemoryManaged)
    {
        tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    }
    return tensorHandle;
}

std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                       DataLayout dataLayout,
                                                                       const bool IsMemoryManaged) const
{
    auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo, dataLayout);
    if (IsMemoryManaged)
    {
        tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    }
    return tensorHandle;
}

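// CreateAbs simply forwards to the generic ElementwiseUnary path with UnaryOperation::Abs.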
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Abs);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonActivationWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<NeonAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<NeonArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonComparisonWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<NeonConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp32ToFp16Workload>(descriptor, info);
}

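// Convolution2d consults the backend model context (when one was supplied via ModelOptions; see
// NeonBackendModelContext) for the fast-math setting, which allows the Compute Library to pick
// faster, potentially less precise kernels.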
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConvolution2d(
    const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
        if (modelOptions)
        {
            isFastMathEnabled = modelOptions->IsFastMathEnabled();
        }
    }
    return std::make_unique<NeonConvolution2dWorkload>(descriptor,
                                                       info,
                                                       m_MemoryManager->GetIntraLayerManager(),
                                                       isFastMathEnabled);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonDepthwiseConvolutionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDetectionPostProcess(
    const armnn::DetectionPostProcessQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateDivision(
    const DivisionQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonDivisionWorkload>(descriptor, info);
}

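// Abs and Rsqrt map onto dedicated workloads with their own descriptor types; Neg, Exp and
// LogicalNot take the generic descriptor directly. Unsupported operations yield nullptr.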
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateElementwiseUnary(
    const ElementwiseUnaryQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonAbsWorkload>(absQueueDescriptor, info);
        }
        case UnaryOperation::Rsqrt:
        {
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonRsqrtWorkload>(rsqrtQueueDescriptor, info);
        }
        case UnaryOperation::Neg:
            return std::make_unique<NeonNegWorkload>(descriptor, info);
        case UnaryOperation::Exp:
            return std::make_unique<NeonExpWorkload>(descriptor, info);
        case UnaryOperation::LogicalNot:
            return std::make_unique<NeonLogicalNotWorkload>(descriptor, info);
        default:
            return nullptr;
    }
}

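// CreateEqual forwards to the generic Comparison path with ComparisonOperation::Equal.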
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Equal);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonFloorFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGather(const armnn::GatherQueueDescriptor& descriptor,
                                                             const armnn::WorkloadInfo& info) const
{
    return std::make_unique<NeonGatherWorkload>(descriptor, info);
}

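// CreateGreater forwards to the generic Comparison path with ComparisonOperation::Greater.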
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Greater);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonL2NormalizationFloatWorkload, NullWorkload>(descriptor, info,
                                                                              m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

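// Only LogicalAnd and LogicalOr are handled; any other logical binary operation yields nullptr.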
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case LogicalBinaryOperation::LogicalAnd:
            return std::make_unique<NeonLogicalAndWorkload>(descriptor, info);
        case LogicalBinaryOperation::LogicalOr:
            return std::make_unique<NeonLogicalOrWorkload>(descriptor, info);
        default:
            return nullptr;
    }
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonLstmFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonMeanWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemCopy workload");
    }

    return MakeWorkloadHelper<CopyMemGenericWorkload, CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemImport workload");
    }

    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

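// CreateMerger is the older name for concatenation and simply forwards to CreateConcat.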
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonMinimumWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMultiplication(
    const MultiplicationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateNormalization(
    const NormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonNormalizationFloatWorkload, NullWorkload>(descriptor, info,
                                                                            m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<NeonPadWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonPermuteWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    return std::make_unique<NeonPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePrelu(const armnn::PreluQueueDescriptor& descriptor,
                                                                   const armnn::WorkloadInfo& info) const
{
    return std::make_unique<NeonPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonQLstmWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<NeonQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<NeonQuantizedLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<NeonResizeWorkload>(descriptor, info);
}

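// CreateResizeBilinear repackages the bilinear parameters into a ResizeQueueDescriptor and
// forwards to the generic CreateResize path.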
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateResizeBilinear(
    const ResizeBilinearQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Inputs  = descriptor.m_Inputs;
    resizeDescriptor.m_Outputs = descriptor.m_Outputs;

    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

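// CreateRsqrt forwards to the generic ElementwiseUnary path with UnaryOperation::Rsqrt.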
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<NeonSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateSubtraction(
    const SubtractionQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    return std::make_unique<NeonTransposeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonTransposeConvolution2dWorkload>(descriptor, info,
                                                                m_MemoryManager->GetIntraLayerManager());
}

} // namespace armnn