blob: 4c9397b95021bec88e0c982dba3c578e22df160e [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Keith Davis69e653f2020-07-02 11:49:26 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tar56055192018-11-12 18:10:43 +00005
David Beck79141b92018-10-23 16:09:36 +01006#include "NeonBackendId.hpp"
Sadik Armagan04a72972020-09-14 15:44:18 +01007#include "NeonBackendModelContext.hpp"
Aron Virginas-Tar56055192018-11-12 18:10:43 +00008#include "NeonTensorHandle.hpp"
9#include "NeonWorkloadFactory.hpp"
10
David Beck0dbe0ee2018-09-24 15:59:27 +010011#include <Layer.hpp>
telsoa014fcda012018-03-09 14:13:49 +000012
Aron Virginas-Tar56055192018-11-12 18:10:43 +000013#include <armnn/Utils.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000014#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan171214c2020-09-09 09:07:37 +010015#include <armnn/utility/NumericCast.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010016#include <armnn/utility/PolymorphicDowncast.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010017
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000018#include <backendsCommon/MakeWorkloadHelper.hpp>
Aron Virginas-Tar56055192018-11-12 18:10:43 +000019#include <backendsCommon/MemCopyWorkload.hpp>
Derek Lambertif674aa02019-08-01 15:56:25 +010020#include <backendsCommon/MemImportWorkload.hpp>
James Conroy1f58f032021-04-27 17:13:27 +010021#include <backendsCommon/TensorHandle.hpp>
telsoa014fcda012018-03-09 14:13:49 +000022
Aron Virginas-Tar56055192018-11-12 18:10:43 +000023#include <neon/workloads/NeonWorkloadUtils.hpp>
24#include <neon/workloads/NeonWorkloads.hpp>
25
telsoa014fcda012018-03-09 14:13:49 +000026namespace armnn
27{
28
namespace
{
// File-local backend identifier shared by all NeonWorkloadFactory instances;
// used to key backend-agnostic support queries to the Neon backend.
static const BackendId s_Id{NeonBackendId()};
}
David Beck29c75de2018-10-23 13:35:58 +010034bool NeonWorkloadFactory::IsLayerSupported(const Layer& layer,
35 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +010036 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000037{
David Beck79141b92018-10-23 16:09:36 +010038 return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
39}
40
Sadik Armagan04a72972020-09-14 15:44:18 +010041bool NeonWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
42 Optional<DataType> dataType,
43 std::string& outReasonIfUnsupported,
44 const ModelOptions& modelOptions)
45{
46 return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
47}
48
// Returns the Neon backend identifier. The reference is to a file-local static,
// so it remains valid for the lifetime of the program.
const BackendId& NeonWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
53
Matthew Sloyan0a7dc6b2021-02-10 16:50:53 +000054void NeonWorkloadFactory::SetNumberOfThreads()
55{
56 if (m_ModelContextPtr)
57 {
58 const unsigned int MIN_THREADS = 1;
59 const unsigned int MAX_THREADS = 64;
60
61 // Set the number of threads to be used if the user has set NumberOfThreads param
62 // Only set if within limit or valid input
63 auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
64 auto numberOfThreads = modelOptions->GetNumberOfThreads();
65
66 if (numberOfThreads != 0 && numberOfThreads >= MIN_THREADS && numberOfThreads <= MAX_THREADS)
67 {
68 arm_compute::Scheduler::get().set_num_threads(numberOfThreads);
69 }
70 }
71}
72
// Constructs a factory with a memory manager and no backend-specific model
// context; SetNumberOfThreads() is then a no-op since m_ModelContextPtr is empty.
NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
    SetNumberOfThreads();
}

// Constructs a factory with a memory manager plus a backend-specific model
// context (carries options such as NumberOfThreads and FastMath, read elsewhere
// in this file). Applies the thread-count option immediately.
NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager,
                                         const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
    SetNumberOfThreads();
}
85
// Creates a handle that views a region of 'parent' without owning new storage.
// 'subTensorOrigin' gives the sub-tensor's offset within the parent, indexed in
// armnn dimension order. Returns nullptr when the requested region is not a
// valid sub-tensor of the parent (as judged by Arm Compute's validation).
std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                          TensorShape const& subTensorShape,
                                                                          unsigned int const* subTensorOrigin) const
{
    const arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    arm_compute::Coordinates coords;
    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    // Validate the (origin, shape) pair against the parent's shape; a non-zero
    // return from error_on_invalid_subtensor signals an invalid region.
    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<NeonSubTensorHandle>(
        PolymorphicDowncast<IAclTensorHandle*>(&parent), shape, coords);
}
110
David Monahan3fb7e102019-08-20 11:25:29 +0100111std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
112 const bool IsMemoryManaged) const
telsoa014fcda012018-03-09 14:13:49 +0000113{
telsoa01c577f2c2018-08-31 09:22:23 +0100114 auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo);
David Monahan3fb7e102019-08-20 11:25:29 +0100115 if (IsMemoryManaged)
116 {
117 tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
118 }
telsoa01c577f2c2018-08-31 09:22:23 +0100119 return tensorHandle;
telsoa014fcda012018-03-09 14:13:49 +0000120}
121
Francis Murtagh351d13d2018-09-24 15:01:18 +0100122std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
David Monahan3fb7e102019-08-20 11:25:29 +0100123 DataLayout dataLayout,
124 const bool IsMemoryManaged) const
Francis Murtagh351d13d2018-09-24 15:01:18 +0100125{
126 auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo, dataLayout);
David Monahan3fb7e102019-08-20 11:25:29 +0100127 if (IsMemoryManaged)
128 {
129 tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
130 }
Francis Murtagh351d13d2018-09-24 15:01:18 +0100131 return tensorHandle;
132}
133
telsoa014fcda012018-03-09 14:13:49 +0000134std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
135 const WorkloadInfo& info) const
136{
Nattapat Chaimanowongd4b70592018-10-12 11:21:49 +0100137 return std::make_unique<NeonActivationWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000138}
139
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100140std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
141 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000142{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100143 return std::make_unique<NeonAdditionWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000144}
145
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100146std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
147 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000148{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100149 return std::make_unique<NeonArgMinMaxWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000150}
151
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100152std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateBatchNormalization(
153 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
154{
155 return std::make_unique<NeonBatchNormalizationWorkload>(descriptor, info);
156}
157
158std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
159 const WorkloadInfo& info) const
160{
Mike Kelly56858022020-01-27 12:14:47 +0000161 return std::make_unique<NeonBatchToSpaceNdWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100162}
163
Sadik Armagan48f011e2021-04-21 10:50:34 +0100164std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
165 const WorkloadInfo& info) const
166{
167 return std::make_unique<NeonCastWorkload>(descriptor, info);
168}
169
Teresa Charline89dd692021-09-01 16:30:34 +0100170std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
171 const WorkloadInfo& info) const
172{
173 return std::make_unique<NeonChannelShuffleWorkload>(descriptor, info);
174}
175
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100176std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
177 const WorkloadInfo& info) const
178{
Teresa Charlincedd34f2020-03-30 11:17:30 +0100179 return std::make_unique<NeonComparisonWorkload>(descriptor, info);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100180}
181
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100182std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
telsoa014fcda012018-03-09 14:13:49 +0000183 const WorkloadInfo& info) const
184{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100185 return std::make_unique<NeonConcatWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000186}
187
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100188std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
189 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000190{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100191 return std::make_unique<NeonConstantWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000192}
193
Narumol Prangnawarat250d3922020-03-30 16:11:04 +0100194std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertBf16ToFp32(
195 const ConvertBf16ToFp32QueueDescriptor& descriptor,
196 const WorkloadInfo& info) const
197{
198 return std::make_unique<NeonConvertBf16ToFp32Workload>(descriptor, info);
199}
200
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100201std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp16ToFp32(
202 const ConvertFp16ToFp32QueueDescriptor& descriptor,
203 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000204{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100205 return std::make_unique<NeonConvertFp16ToFp32Workload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000206}
207
Narumol Prangnawarat250d3922020-03-30 16:11:04 +0100208std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToBf16(
209 const ConvertFp32ToBf16QueueDescriptor& descriptor,
210 const WorkloadInfo& info) const
211{
212 return std::make_unique<NeonConvertFp32ToBf16Workload>(descriptor, info);
213}
214
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100215std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToFp16(
216 const ConvertFp32ToFp16QueueDescriptor& descriptor,
217 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000218{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100219 return std::make_unique<NeonConvertFp32ToFp16Workload>(descriptor, info);
Nikhil Raj9b461482019-07-03 15:58:31 +0100220}
221
telsoa014fcda012018-03-09 14:13:49 +0000222std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConvolution2d(
223 const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
224{
Sadik Armagan04a72972020-09-14 15:44:18 +0100225 bool isFastMathEnabled = false;
226 if (m_ModelContextPtr)
227 {
228 if (m_ModelContextPtr.get() != nullptr)
229 {
230 auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
231 if (modelOptions)
232 {
233 isFastMathEnabled = modelOptions->IsFastMathEnabled();
234 }
235 }
236 }
237 return std::make_unique<NeonConvolution2dWorkload>(descriptor,
238 info,
239 m_MemoryManager->GetIntraLayerManager(),
240 isFastMathEnabled);
telsoa014fcda012018-03-09 14:13:49 +0000241}
242
Teresa Charlinec5f7d12021-10-22 17:15:00 +0100243std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConvolution3d(
244 const Convolution3dQueueDescriptor& descriptor, const WorkloadInfo& info) const
245{
246 bool isFastMathEnabled = false;
247 if (m_ModelContextPtr)
248 {
249 if (m_ModelContextPtr.get() != nullptr)
250 {
251 auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
252 if (modelOptions)
253 {
254 isFastMathEnabled = modelOptions->IsFastMathEnabled();
255 }
256 }
257 }
258 return std::make_unique<NeonConvolution3dWorkload>(descriptor,
259 info,
260 m_MemoryManager->GetIntraLayerManager(),
261 isFastMathEnabled);
262}
263
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100264std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
265 const WorkloadInfo& info) const
266{
267 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
268}
269
Aron Virginas-Tar2f00b742019-09-30 13:28:08 +0100270std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
271 const WorkloadInfo& info) const
272{
273 return std::make_unique<NeonDepthToSpaceWorkload>(descriptor, info);
274}
275
telsoa014fcda012018-03-09 14:13:49 +0000276std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthwiseConvolution2d(
277 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
278{
Nattapat Chaimanowong77140882018-10-17 11:12:19 +0100279 return std::make_unique<NeonDepthwiseConvolutionWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000280}
281
Narumol Prangnawarat01961a72019-05-30 16:47:12 +0100282std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
283 const WorkloadInfo& info) const
284{
285 return std::make_unique<NeonDequantizeWorkload>(descriptor, info);
286}
287
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000288std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDetectionPostProcess(
289 const armnn::DetectionPostProcessQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
290{
James Conroyd9fb6e22020-02-21 16:52:44 +0000291 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000292}
293
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100294std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateDivision(
295 const DivisionQueueDescriptor& descriptor, const WorkloadInfo& info) const
296{
Pablo Telloe61f0712020-01-23 10:37:17 +0000297 return std::make_unique<NeonDivisionWorkload>(descriptor, info);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100298}
299
// Dispatches a unary element-wise operation to its dedicated Neon workload.
// Abs and Rsqrt have workloads that take their own descriptor types, so the
// generic descriptor's input/output lists are repackaged for those cases.
// Unsupported operations return nullptr.
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateElementwiseUnary(
    const ElementwiseUnaryQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            // NeonAbsWorkload expects an AbsQueueDescriptor; copy over the I/O lists.
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonAbsWorkload>(absQueueDescriptor, info);
        }
        case UnaryOperation::Exp:
            return std::make_unique<NeonExpWorkload>(descriptor, info);
        case UnaryOperation::LogicalNot:
            return std::make_unique<NeonLogicalNotWorkload>(descriptor, info);
        case UnaryOperation::Log:
            return std::make_unique<NeonLogWorkload>(descriptor, info);
        case UnaryOperation::Neg:
            return std::make_unique<NeonNegWorkload>(descriptor, info);
        case UnaryOperation::Rsqrt:
        {
            // NeonRsqrtWorkload expects an RsqrtQueueDescriptor; copy over the I/O lists.
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonRsqrtWorkload>(rsqrtQueueDescriptor, info);
        }
        case UnaryOperation::Sin:
            return std::make_unique<NeonSinWorkload>(descriptor, info);
        default:
            // Operation not implemented for the Neon backend.
            return nullptr;
    }
}
335
Sadik Armagana792a052020-06-23 16:22:23 +0100336std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
337 const WorkloadInfo& info) const
338{
339 return std::make_unique<NeonFillWorkload>(descriptor, info);
340}
341
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100342std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
343 const WorkloadInfo& info) const
344{
345 return MakeWorkloadHelper<NeonFloorFloatWorkload, NullWorkload>(descriptor, info);
346}
347
348std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateFullyConnected(
349 const FullyConnectedQueueDescriptor& descriptor, const WorkloadInfo& info) const
350{
Kevin Maybe7e35c2020-04-29 17:05:05 +0100351 return std::make_unique<NeonFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100352}
353
354std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGather(const armnn::GatherQueueDescriptor& descriptor,
355 const armnn::WorkloadInfo& info) const
356{
Teresa Charlinf540eb82020-04-10 19:24:55 +0100357 return std::make_unique<NeonGatherWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100358}
359
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100360std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
361 const WorkloadInfo& info) const
362{
363 return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
364}
365
366std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInstanceNormalization(
367 const InstanceNormalizationQueueDescriptor& descriptor,
368 const WorkloadInfo& info) const
369{
370 return std::make_unique<NeonInstanceNormalizationWorkload>(descriptor, info);
371}
372
373std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
374 const WorkloadInfo& info) const
375{
376 return MakeWorkloadHelper<NeonL2NormalizationFloatWorkload, NullWorkload>(descriptor, info,
377 m_MemoryManager->GetIntraLayerManager());
378}
379
Keith Davis69e653f2020-07-02 11:49:26 +0100380std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
381 const WorkloadInfo& info) const
382{
383 return std::make_unique<NeonLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
384}
385
James Conroy177df1e2020-11-13 10:18:51 +0000386std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
387 const WorkloadInfo& info) const
388{
389 switch(descriptor.m_Parameters.m_Operation)
390 {
391 case LogicalBinaryOperation::LogicalAnd:
392 return std::make_unique<NeonLogicalAndWorkload>(descriptor, info);
393 case LogicalBinaryOperation::LogicalOr:
394 return std::make_unique<NeonLogicalOrWorkload>(descriptor, info);
395 default:
396 return nullptr;
397 }
398}
399
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100400std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
401 const WorkloadInfo& info) const
402{
403 return MakeWorkloadHelper<NeonLstmFloatWorkload, NullWorkload>(descriptor, info);
404}
405
406std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
407 const WorkloadInfo& info) const
408{
409 return std::make_unique<NeonMaximumWorkload>(descriptor, info);
410}
411
412std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
413 const WorkloadInfo& info) const
414{
415 return std::make_unique<NeonMeanWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000416}
417
418std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
419 const WorkloadInfo& info) const
420{
421 if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
422 {
423 throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemCopy workload");
424 }
425
Aron Virginas-Tara8e06ed2018-10-19 16:46:15 +0100426 return MakeWorkloadHelper<CopyMemGenericWorkload, CopyMemGenericWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000427}
428
Derek Lambertif674aa02019-08-01 15:56:25 +0100429std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
430 const WorkloadInfo& info) const
431{
432 if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
433 {
434 throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemImport workload");
435 }
436
437 return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
438}
439
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100440std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
441 const WorkloadInfo& info) const
442{
443 return std::make_unique<NeonMinimumWorkload>(descriptor, info);
444}
445
446std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMultiplication(
447 const MultiplicationQueueDescriptor& descriptor, const WorkloadInfo& info) const
448{
449 return std::make_unique<NeonMultiplicationWorkload>(descriptor, info);
450}
451
452std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateNormalization(
453 const NormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
454{
455 return MakeWorkloadHelper<NeonNormalizationFloatWorkload, NullWorkload>(descriptor, info,
456 m_MemoryManager->GetIntraLayerManager());
457}
458
459std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100460 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100461{
462 return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
463}
464
465std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
466 const WorkloadInfo& info) const
467{
468 return std::make_unique<NeonPadWorkload>(descriptor, info);
469}
470
471std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100472 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100473{
474 return std::make_unique<NeonPermuteWorkload>(descriptor, info);
475}
476
477std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100478 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100479{
480 return std::make_unique<NeonPooling2dWorkload>(descriptor, info);
481}
482
483std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
484 const WorkloadInfo& info) const
485{
486 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
487}
488
489std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePrelu(const armnn::PreluQueueDescriptor &descriptor,
490 const armnn::WorkloadInfo &info) const
491{
492 return std::make_unique<NeonPreluWorkload>(descriptor, info);
493}
494
James Conroycc340932020-05-12 18:08:52 +0100495std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
496 const WorkloadInfo& info) const
497{
498 return std::make_unique<NeonQLstmWorkload>(descriptor, info);
499}
500
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100501std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
502 const WorkloadInfo& info) const
503{
Kevin May90774732020-03-03 12:10:10 +0000504 return std::make_unique<NeonQuantizeWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100505}
506
507std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
508 const WorkloadInfo& info) const
509{
510 return std::make_unique<NeonQuantizedLstmWorkload>(descriptor, info);
511}
512
David Monahan97451b42020-12-03 09:48:06 +0000513std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
514 const WorkloadInfo& info) const
515{
516 return std::make_unique<NeonRankWorkload>(descriptor, info);
517}
518
Sadik Armagana2747482021-02-09 10:28:54 +0000519std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
520 const WorkloadInfo& info) const
521{
522 return std::make_unique<NeonReduceWorkload>(descriptor, info);
523}
524
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100525std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
526 const WorkloadInfo& info) const
527{
528 return std::make_unique<NeonReshapeWorkload>(descriptor, info);
529}
530
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100531std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
532 const WorkloadInfo& info) const
533{
Ellen Norris-Thompson37e68682019-07-15 14:23:30 +0100534 return std::make_unique<NeonResizeWorkload>(descriptor, info);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100535}
536
josh minor036f02d2019-11-15 14:53:22 -0600537std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
538 const WorkloadInfo& info) const
539{
540 return std::make_unique<NeonSliceWorkload>(descriptor, info);
541}
542
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100543std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
544 const WorkloadInfo& info) const
Sadik Armagan581742d2019-08-12 14:11:37 +0100545{
Sadik Armaganbe88a572020-04-30 11:39:37 +0100546 return std::make_unique<NeonSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
Sadik Armagan581742d2019-08-12 14:11:37 +0100547}
548
Mike Kelly0be3a882020-01-24 11:27:50 +0000549std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
550 const WorkloadInfo& info) const
551{
552 return std::make_unique<NeonSpaceToBatchNdWorkload>(descriptor, info);
553}
554
555std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
556 const WorkloadInfo& info) const
narpra01b89b05f2019-01-16 09:53:09 +0000557{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100558 return std::make_unique<NeonSpaceToDepthWorkload>(descriptor, info);
559}
560
561std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
562 const WorkloadInfo& info) const
563{
564 return std::make_unique<NeonSplitterWorkload>(descriptor, info);
narpra01b89b05f2019-01-16 09:53:09 +0000565}
566
Matthew Jackson87f65ea2019-08-01 10:01:34 +0100567std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
568 const WorkloadInfo& info) const
569{
570 return std::make_unique<NeonStackWorkload>(descriptor, info);
571}
572
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100573std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
574 const WorkloadInfo& info) const
James Conroyd47a0642019-09-17 14:22:06 +0100575{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100576 return std::make_unique<NeonStridedSliceWorkload>(descriptor, info);
577}
578
579std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateSubtraction(
580 const SubtractionQueueDescriptor& descriptor, const WorkloadInfo& info) const
581{
582 return std::make_unique<NeonSubtractionWorkload>(descriptor, info);
583}
584
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000585std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
586 const WorkloadInfo& info) const
587{
588 return std::make_unique<NeonTransposeWorkload>(descriptor, info);
589}
590
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100591std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateTransposeConvolution2d(
592 const TransposeConvolution2dQueueDescriptor &descriptor,
593 const WorkloadInfo &info) const
594{
595 return std::make_unique<NeonTransposeConvolution2dWorkload>(descriptor, info,
596 m_MemoryManager->GetIntraLayerManager());
James Conroyd47a0642019-09-17 14:22:06 +0100597}
598
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000599} // namespace armnn