blob: 605b03d6b701b17dbaf4d88e95caff9ade8493d2 [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Keith Davis69e653f2020-07-02 11:49:26 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tar56055192018-11-12 18:10:43 +00005
David Beck79141b92018-10-23 16:09:36 +01006#include "NeonBackendId.hpp"
Sadik Armagan04a72972020-09-14 15:44:18 +01007#include "NeonBackendModelContext.hpp"
Aron Virginas-Tar56055192018-11-12 18:10:43 +00008#include "NeonTensorHandle.hpp"
9#include "NeonWorkloadFactory.hpp"
10
David Beck0dbe0ee2018-09-24 15:59:27 +010011#include <Layer.hpp>
telsoa014fcda012018-03-09 14:13:49 +000012
Aron Virginas-Tar56055192018-11-12 18:10:43 +000013#include <armnn/Utils.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000014#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan171214c2020-09-09 09:07:37 +010015#include <armnn/utility/NumericCast.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010016#include <armnn/utility/PolymorphicDowncast.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010017
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000018#include <backendsCommon/MakeWorkloadHelper.hpp>
Aron Virginas-Tar56055192018-11-12 18:10:43 +000019#include <backendsCommon/MemCopyWorkload.hpp>
Derek Lambertif674aa02019-08-01 15:56:25 +010020#include <backendsCommon/MemImportWorkload.hpp>
James Conroy1f58f032021-04-27 17:13:27 +010021#include <backendsCommon/TensorHandle.hpp>
telsoa014fcda012018-03-09 14:13:49 +000022
Aron Virginas-Tar56055192018-11-12 18:10:43 +000023#include <neon/workloads/NeonWorkloadUtils.hpp>
24#include <neon/workloads/NeonWorkloads.hpp>
25
telsoa014fcda012018-03-09 14:13:49 +000026namespace armnn
27{
28
namespace
{
// File-local backend identifier for the Neon backend; returned by GetBackendId()
// and passed to IWorkloadFactory::IsLayerSupported.
static const BackendId s_Id{NeonBackendId()};
}
33
David Beck29c75de2018-10-23 13:35:58 +010034bool NeonWorkloadFactory::IsLayerSupported(const Layer& layer,
35 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +010036 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000037{
David Beck79141b92018-10-23 16:09:36 +010038 return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
39}
40
Sadik Armagan04a72972020-09-14 15:44:18 +010041bool NeonWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
42 Optional<DataType> dataType,
43 std::string& outReasonIfUnsupported,
44 const ModelOptions& modelOptions)
45{
46 return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
47}
48
// Returns the file-local Neon backend identifier (s_Id).
const BackendId& NeonWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
53
Matthew Sloyan0a7dc6b2021-02-10 16:50:53 +000054void NeonWorkloadFactory::SetNumberOfThreads()
55{
56 if (m_ModelContextPtr)
57 {
58 const unsigned int MIN_THREADS = 1;
59 const unsigned int MAX_THREADS = 64;
60
61 // Set the number of threads to be used if the user has set NumberOfThreads param
62 // Only set if within limit or valid input
63 auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
64 auto numberOfThreads = modelOptions->GetNumberOfThreads();
65
66 if (numberOfThreads != 0 && numberOfThreads >= MIN_THREADS && numberOfThreads <= MAX_THREADS)
67 {
68 arm_compute::Scheduler::get().set_num_threads(numberOfThreads);
69 }
70 }
71}
72
// Constructs a factory with a memory manager only; no backend-specific model
// context is supplied, so m_ModelContextPtr is left empty.
NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
    // With an empty model context this is effectively a no-op (SetNumberOfThreads
    // checks m_ModelContextPtr before doing anything).
    SetNumberOfThreads();
}
78
// Constructs a factory with a memory manager and a backend-specific model
// context (carrying options such as NumberOfThreads and FastMathEnabled).
NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager,
                                         const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
    // Configure the Arm Compute scheduler thread count from the model options.
    SetNumberOfThreads();
}
85
telsoa014fcda012018-03-09 14:13:49 +000086std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
87 TensorShape const& subTensorShape,
88 unsigned int const* subTensorOrigin) const
89{
telsoa014fcda012018-03-09 14:13:49 +000090 const arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);
91
92 arm_compute::Coordinates coords;
93 coords.set_num_dimensions(subTensorShape.GetNumDimensions());
94 for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
95 {
telsoa01c577f2c2018-08-31 09:22:23 +010096 // Arm compute indexes tensor coords in reverse order.
telsoa014fcda012018-03-09 14:13:49 +000097 unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
Matthew Sloyan171214c2020-09-09 09:07:37 +010098 coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
telsoa014fcda012018-03-09 14:13:49 +000099 }
100
Derek Lamberti0790dce2019-04-15 18:37:35 +0100101 const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
102 if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
103 {
104 return nullptr;
105 }
106
telsoa01c577f2c2018-08-31 09:22:23 +0100107 return std::make_unique<NeonSubTensorHandle>(
Jan Eilersbb446e52020-04-02 13:56:54 +0100108 PolymorphicDowncast<IAclTensorHandle*>(&parent), shape, coords);
telsoa014fcda012018-03-09 14:13:49 +0000109}
110
David Monahan3fb7e102019-08-20 11:25:29 +0100111std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
112 const bool IsMemoryManaged) const
telsoa014fcda012018-03-09 14:13:49 +0000113{
telsoa01c577f2c2018-08-31 09:22:23 +0100114 auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo);
David Monahan3fb7e102019-08-20 11:25:29 +0100115 if (IsMemoryManaged)
116 {
117 tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
118 }
telsoa01c577f2c2018-08-31 09:22:23 +0100119 return tensorHandle;
telsoa014fcda012018-03-09 14:13:49 +0000120}
121
Francis Murtagh351d13d2018-09-24 15:01:18 +0100122std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
David Monahan3fb7e102019-08-20 11:25:29 +0100123 DataLayout dataLayout,
124 const bool IsMemoryManaged) const
Francis Murtagh351d13d2018-09-24 15:01:18 +0100125{
126 auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo, dataLayout);
David Monahan3fb7e102019-08-20 11:25:29 +0100127 if (IsMemoryManaged)
128 {
129 tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
130 }
Francis Murtagh351d13d2018-09-24 15:01:18 +0100131 return tensorHandle;
132}
133
telsoa014fcda012018-03-09 14:13:49 +0000134std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
135 const WorkloadInfo& info) const
136{
Nattapat Chaimanowongd4b70592018-10-12 11:21:49 +0100137 return std::make_unique<NeonActivationWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000138}
139
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100140std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
141 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000142{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100143 return std::make_unique<NeonAdditionWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000144}
145
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100146std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
147 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000148{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100149 return std::make_unique<NeonArgMinMaxWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000150}
151
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100152std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateBatchNormalization(
153 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
154{
155 return std::make_unique<NeonBatchNormalizationWorkload>(descriptor, info);
156}
157
158std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
159 const WorkloadInfo& info) const
160{
Mike Kelly56858022020-01-27 12:14:47 +0000161 return std::make_unique<NeonBatchToSpaceNdWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100162}
163
Sadik Armagan48f011e2021-04-21 10:50:34 +0100164std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
165 const WorkloadInfo& info) const
166{
167 return std::make_unique<NeonCastWorkload>(descriptor, info);
168}
169
Teresa Charline89dd692021-09-01 16:30:34 +0100170std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
171 const WorkloadInfo& info) const
172{
173 return std::make_unique<NeonChannelShuffleWorkload>(descriptor, info);
174}
175
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100176std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
177 const WorkloadInfo& info) const
178{
Teresa Charlincedd34f2020-03-30 11:17:30 +0100179 return std::make_unique<NeonComparisonWorkload>(descriptor, info);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100180}
181
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100182std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
telsoa014fcda012018-03-09 14:13:49 +0000183 const WorkloadInfo& info) const
184{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100185 return std::make_unique<NeonConcatWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000186}
187
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100188std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
189 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000190{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100191 return std::make_unique<NeonConstantWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000192}
193
Narumol Prangnawarat250d3922020-03-30 16:11:04 +0100194std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertBf16ToFp32(
195 const ConvertBf16ToFp32QueueDescriptor& descriptor,
196 const WorkloadInfo& info) const
197{
198 return std::make_unique<NeonConvertBf16ToFp32Workload>(descriptor, info);
199}
200
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100201std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp16ToFp32(
202 const ConvertFp16ToFp32QueueDescriptor& descriptor,
203 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000204{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100205 return std::make_unique<NeonConvertFp16ToFp32Workload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000206}
207
Narumol Prangnawarat250d3922020-03-30 16:11:04 +0100208std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToBf16(
209 const ConvertFp32ToBf16QueueDescriptor& descriptor,
210 const WorkloadInfo& info) const
211{
212 return std::make_unique<NeonConvertFp32ToBf16Workload>(descriptor, info);
213}
214
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100215std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToFp16(
216 const ConvertFp32ToFp16QueueDescriptor& descriptor,
217 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000218{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100219 return std::make_unique<NeonConvertFp32ToFp16Workload>(descriptor, info);
Nikhil Raj9b461482019-07-03 15:58:31 +0100220}
221
telsoa014fcda012018-03-09 14:13:49 +0000222std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConvolution2d(
223 const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
224{
Sadik Armagan04a72972020-09-14 15:44:18 +0100225 bool isFastMathEnabled = false;
226 if (m_ModelContextPtr)
227 {
228 if (m_ModelContextPtr.get() != nullptr)
229 {
230 auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
231 if (modelOptions)
232 {
233 isFastMathEnabled = modelOptions->IsFastMathEnabled();
234 }
235 }
236 }
237 return std::make_unique<NeonConvolution2dWorkload>(descriptor,
238 info,
239 m_MemoryManager->GetIntraLayerManager(),
240 isFastMathEnabled);
telsoa014fcda012018-03-09 14:13:49 +0000241}
242
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100243std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
244 const WorkloadInfo& info) const
245{
246 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
247}
248
Aron Virginas-Tar2f00b742019-09-30 13:28:08 +0100249std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
250 const WorkloadInfo& info) const
251{
252 return std::make_unique<NeonDepthToSpaceWorkload>(descriptor, info);
253}
254
telsoa014fcda012018-03-09 14:13:49 +0000255std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthwiseConvolution2d(
256 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
257{
Nattapat Chaimanowong77140882018-10-17 11:12:19 +0100258 return std::make_unique<NeonDepthwiseConvolutionWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000259}
260
Narumol Prangnawarat01961a72019-05-30 16:47:12 +0100261std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
262 const WorkloadInfo& info) const
263{
264 return std::make_unique<NeonDequantizeWorkload>(descriptor, info);
265}
266
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000267std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDetectionPostProcess(
268 const armnn::DetectionPostProcessQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
269{
James Conroyd9fb6e22020-02-21 16:52:44 +0000270 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000271}
272
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100273std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateDivision(
274 const DivisionQueueDescriptor& descriptor, const WorkloadInfo& info) const
275{
Pablo Telloe61f0712020-01-23 10:37:17 +0000276 return std::make_unique<NeonDivisionWorkload>(descriptor, info);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100277}
278
Sadik Armaganac472102020-03-24 09:54:36 +0000279std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateElementwiseUnary(
280 const ElementwiseUnaryQueueDescriptor& descriptor, const WorkloadInfo& info) const
josh minor4a3c6102020-01-06 16:40:46 -0600281{
Derek Lambertic77874a2020-04-28 13:34:56 +0100282 switch(descriptor.m_Parameters.m_Operation)
josh minor4a3c6102020-01-06 16:40:46 -0600283 {
Derek Lambertic77874a2020-04-28 13:34:56 +0100284 case UnaryOperation::Abs:
James Conroy177df1e2020-11-13 10:18:51 +0000285 {
286 AbsQueueDescriptor absQueueDescriptor;
287 absQueueDescriptor.m_Inputs = descriptor.m_Inputs;
288 absQueueDescriptor.m_Outputs = descriptor.m_Outputs;
josh minor4a3c6102020-01-06 16:40:46 -0600289
James Conroy177df1e2020-11-13 10:18:51 +0000290 return std::make_unique<NeonAbsWorkload>(absQueueDescriptor, info);
291 }
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100292 case UnaryOperation::Exp:
293 return std::make_unique<NeonExpWorkload>(descriptor, info);
294 case UnaryOperation::LogicalNot:
295 return std::make_unique<NeonLogicalNotWorkload>(descriptor, info);
296 case UnaryOperation::Log:
297 return std::make_unique<NeonLogWorkload>(descriptor, info);
298 case UnaryOperation::Neg:
299 return std::make_unique<NeonNegWorkload>(descriptor, info);
Derek Lambertic77874a2020-04-28 13:34:56 +0100300 case UnaryOperation::Rsqrt:
James Conroy177df1e2020-11-13 10:18:51 +0000301 {
302 RsqrtQueueDescriptor rsqrtQueueDescriptor;
303 rsqrtQueueDescriptor.m_Inputs = descriptor.m_Inputs;
304 rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;
josh minor4a3c6102020-01-06 16:40:46 -0600305
James Conroy177df1e2020-11-13 10:18:51 +0000306 return std::make_unique<NeonRsqrtWorkload>(rsqrtQueueDescriptor, info);
307 }
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100308 case UnaryOperation::Sin:
309 return std::make_unique<NeonSinWorkload>(descriptor, info);
Derek Lambertic77874a2020-04-28 13:34:56 +0100310 default:
311 return nullptr;
josh minor4a3c6102020-01-06 16:40:46 -0600312 }
josh minor4a3c6102020-01-06 16:40:46 -0600313}
314
Sadik Armagana792a052020-06-23 16:22:23 +0100315std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
316 const WorkloadInfo& info) const
317{
318 return std::make_unique<NeonFillWorkload>(descriptor, info);
319}
320
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100321std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
322 const WorkloadInfo& info) const
323{
324 return MakeWorkloadHelper<NeonFloorFloatWorkload, NullWorkload>(descriptor, info);
325}
326
327std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateFullyConnected(
328 const FullyConnectedQueueDescriptor& descriptor, const WorkloadInfo& info) const
329{
Kevin Maybe7e35c2020-04-29 17:05:05 +0100330 return std::make_unique<NeonFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100331}
332
333std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGather(const armnn::GatherQueueDescriptor& descriptor,
334 const armnn::WorkloadInfo& info) const
335{
Teresa Charlinf540eb82020-04-10 19:24:55 +0100336 return std::make_unique<NeonGatherWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100337}
338
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100339std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
340 const WorkloadInfo& info) const
341{
342 return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
343}
344
345std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInstanceNormalization(
346 const InstanceNormalizationQueueDescriptor& descriptor,
347 const WorkloadInfo& info) const
348{
349 return std::make_unique<NeonInstanceNormalizationWorkload>(descriptor, info);
350}
351
352std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
353 const WorkloadInfo& info) const
354{
355 return MakeWorkloadHelper<NeonL2NormalizationFloatWorkload, NullWorkload>(descriptor, info,
356 m_MemoryManager->GetIntraLayerManager());
357}
358
Keith Davis69e653f2020-07-02 11:49:26 +0100359std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
360 const WorkloadInfo& info) const
361{
362 return std::make_unique<NeonLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
363}
364
James Conroy177df1e2020-11-13 10:18:51 +0000365std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
366 const WorkloadInfo& info) const
367{
368 switch(descriptor.m_Parameters.m_Operation)
369 {
370 case LogicalBinaryOperation::LogicalAnd:
371 return std::make_unique<NeonLogicalAndWorkload>(descriptor, info);
372 case LogicalBinaryOperation::LogicalOr:
373 return std::make_unique<NeonLogicalOrWorkload>(descriptor, info);
374 default:
375 return nullptr;
376 }
377}
378
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100379std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
380 const WorkloadInfo& info) const
381{
382 return MakeWorkloadHelper<NeonLstmFloatWorkload, NullWorkload>(descriptor, info);
383}
384
385std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
386 const WorkloadInfo& info) const
387{
388 return std::make_unique<NeonMaximumWorkload>(descriptor, info);
389}
390
391std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
392 const WorkloadInfo& info) const
393{
394 return std::make_unique<NeonMeanWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000395}
396
397std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
398 const WorkloadInfo& info) const
399{
400 if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
401 {
402 throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemCopy workload");
403 }
404
Aron Virginas-Tara8e06ed2018-10-19 16:46:15 +0100405 return MakeWorkloadHelper<CopyMemGenericWorkload, CopyMemGenericWorkload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000406}
407
Derek Lambertif674aa02019-08-01 15:56:25 +0100408std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
409 const WorkloadInfo& info) const
410{
411 if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
412 {
413 throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemImport workload");
414 }
415
416 return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
417}
418
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100419std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
420 const WorkloadInfo& info) const
421{
422 return std::make_unique<NeonMinimumWorkload>(descriptor, info);
423}
424
425std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMultiplication(
426 const MultiplicationQueueDescriptor& descriptor, const WorkloadInfo& info) const
427{
428 return std::make_unique<NeonMultiplicationWorkload>(descriptor, info);
429}
430
431std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateNormalization(
432 const NormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
433{
434 return MakeWorkloadHelper<NeonNormalizationFloatWorkload, NullWorkload>(descriptor, info,
435 m_MemoryManager->GetIntraLayerManager());
436}
437
438std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100439 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100440{
441 return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
442}
443
444std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
445 const WorkloadInfo& info) const
446{
447 return std::make_unique<NeonPadWorkload>(descriptor, info);
448}
449
450std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100451 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100452{
453 return std::make_unique<NeonPermuteWorkload>(descriptor, info);
454}
455
456std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
Keith Davis69e653f2020-07-02 11:49:26 +0100457 const WorkloadInfo& info) const
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100458{
459 return std::make_unique<NeonPooling2dWorkload>(descriptor, info);
460}
461
462std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
463 const WorkloadInfo& info) const
464{
465 return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
466}
467
468std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePrelu(const armnn::PreluQueueDescriptor &descriptor,
469 const armnn::WorkloadInfo &info) const
470{
471 return std::make_unique<NeonPreluWorkload>(descriptor, info);
472}
473
James Conroycc340932020-05-12 18:08:52 +0100474std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
475 const WorkloadInfo& info) const
476{
477 return std::make_unique<NeonQLstmWorkload>(descriptor, info);
478}
479
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100480std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
481 const WorkloadInfo& info) const
482{
Kevin May90774732020-03-03 12:10:10 +0000483 return std::make_unique<NeonQuantizeWorkload>(descriptor, info);
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100484}
485
486std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
487 const WorkloadInfo& info) const
488{
489 return std::make_unique<NeonQuantizedLstmWorkload>(descriptor, info);
490}
491
David Monahan97451b42020-12-03 09:48:06 +0000492std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
493 const WorkloadInfo& info) const
494{
495 return std::make_unique<NeonRankWorkload>(descriptor, info);
496}
497
Sadik Armagana2747482021-02-09 10:28:54 +0000498std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
499 const WorkloadInfo& info) const
500{
501 return std::make_unique<NeonReduceWorkload>(descriptor, info);
502}
503
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100504std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
505 const WorkloadInfo& info) const
506{
507 return std::make_unique<NeonReshapeWorkload>(descriptor, info);
508}
509
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100510std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
511 const WorkloadInfo& info) const
512{
Ellen Norris-Thompson37e68682019-07-15 14:23:30 +0100513 return std::make_unique<NeonResizeWorkload>(descriptor, info);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100514}
515
josh minor036f02d2019-11-15 14:53:22 -0600516std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
517 const WorkloadInfo& info) const
518{
519 return std::make_unique<NeonSliceWorkload>(descriptor, info);
520}
521
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100522std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
523 const WorkloadInfo& info) const
Sadik Armagan581742d2019-08-12 14:11:37 +0100524{
Sadik Armaganbe88a572020-04-30 11:39:37 +0100525 return std::make_unique<NeonSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
Sadik Armagan581742d2019-08-12 14:11:37 +0100526}
527
Mike Kelly0be3a882020-01-24 11:27:50 +0000528std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
529 const WorkloadInfo& info) const
530{
531 return std::make_unique<NeonSpaceToBatchNdWorkload>(descriptor, info);
532}
533
534std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
535 const WorkloadInfo& info) const
narpra01b89b05f2019-01-16 09:53:09 +0000536{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100537 return std::make_unique<NeonSpaceToDepthWorkload>(descriptor, info);
538}
539
540std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
541 const WorkloadInfo& info) const
542{
543 return std::make_unique<NeonSplitterWorkload>(descriptor, info);
narpra01b89b05f2019-01-16 09:53:09 +0000544}
545
Matthew Jackson87f65ea2019-08-01 10:01:34 +0100546std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
547 const WorkloadInfo& info) const
548{
549 return std::make_unique<NeonStackWorkload>(descriptor, info);
550}
551
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100552std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
553 const WorkloadInfo& info) const
James Conroyd47a0642019-09-17 14:22:06 +0100554{
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100555 return std::make_unique<NeonStridedSliceWorkload>(descriptor, info);
556}
557
558std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateSubtraction(
559 const SubtractionQueueDescriptor& descriptor, const WorkloadInfo& info) const
560{
561 return std::make_unique<NeonSubtractionWorkload>(descriptor, info);
562}
563
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000564std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
565 const WorkloadInfo& info) const
566{
567 return std::make_unique<NeonTransposeWorkload>(descriptor, info);
568}
569
Sadik Armagan0d4863d2019-10-09 14:26:32 +0100570std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateTransposeConvolution2d(
571 const TransposeConvolution2dQueueDescriptor &descriptor,
572 const WorkloadInfo &info) const
573{
574 return std::make_unique<NeonTransposeConvolution2dWorkload>(descriptor, info,
575 m_MemoryManager->GetIntraLayerManager());
James Conroyd47a0642019-09-17 14:22:06 +0100576}
577
Aron Virginas-Tar56055192018-11-12 18:10:43 +0000578} // namespace armnn