//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "NeonBackendId.hpp"
#include "NeonBackendModelContext.hpp"
#include "NeonTensorHandle.hpp"
#include "NeonWorkloadFactory.hpp"

#include <Layer.hpp>

#include <armnn/Utils.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/TensorHandle.hpp>

#include <neon/workloads/NeonWorkloadUtils.hpp>
#include <neon/workloads/NeonWorkloads.hpp>

namespace armnn
{

namespace
{
static const BackendId s_Id{NeonBackendId()};
}

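// Layer support queries are forwarded to the shared IWorkloadFactory implementation, keyed by the Neon backend id.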
bool NeonWorkloadFactory::IsLayerSupported(const Layer& layer,
                                           Optional<DataType> dataType,
                                           std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool NeonWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                           Optional<DataType> dataType,
                                           std::string& outReasonIfUnsupported,
                                           const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

const BackendId& NeonWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

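// Applies the NumberOfThreads backend option to the Arm Compute scheduler, when a model context is available.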
void NeonWorkloadFactory::SetNumberOfThreads()
{
    if (m_ModelContextPtr)
    {
        const unsigned int MIN_THREADS = 1;
        const unsigned int MAX_THREADS = 64;

        // Apply the NumberOfThreads option only if the user has set it to a value within the supported range.
        auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
        auto numberOfThreads = modelOptions->GetNumberOfThreads();

        if (numberOfThreads != 0 && numberOfThreads >= MIN_THREADS && numberOfThreads <= MAX_THREADS)
        {
            arm_compute::Scheduler::get().set_num_threads(numberOfThreads);
        }
    }
}

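// Both constructors apply the configured scheduler thread count as soon as the factory is created.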
NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
    SetNumberOfThreads();
}

NeonWorkloadFactory::NeonWorkloadFactory(const std::shared_ptr<NeonMemoryManager>& memoryManager,
                                         const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
    SetNumberOfThreads();
}

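// Builds a handle that aliases a region of the parent tensor; returns nullptr if Arm Compute rejects the sub-tensor.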
std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                          TensorShape const& subTensorShape,
                                                                          unsigned int const* subTensorOrigin) const
{
    const arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    arm_compute::Coordinates coords;
    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<NeonSubTensorHandle>(
        PolymorphicDowncast<IAclTensorHandle*>(&parent), shape, coords);
}

std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                       const bool IsMemoryManaged) const
{
    auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo);
    if (IsMemoryManaged)
    {
        tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    }
    return tensorHandle;
}

std::unique_ptr<ITensorHandle> NeonWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                       DataLayout dataLayout,
                                                                       const bool IsMemoryManaged) const
{
    auto tensorHandle = std::make_unique<NeonTensorHandle>(tensorInfo, dataLayout);
    if (IsMemoryManaged)
    {
        tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());
    }
    return tensorHandle;
}

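// Abs is expressed through the generic ElementwiseUnary path.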
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Abs);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonActivationWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<NeonAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<NeonArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonCastWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonChannelShuffleWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonComparisonWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<NeonConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonConvertFp32ToFp16Workload>(descriptor, info);
}

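// Convolution2d honours the FastMathEnabled option when a Neon backend model context has been supplied.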
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateConvolution2d(
    const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        auto modelOptions = dynamic_cast<NeonBackendModelContext*>(m_ModelContextPtr.get());
        if (modelOptions)
        {
            isFastMathEnabled = modelOptions->IsFastMathEnabled();
        }
    }
    return std::make_unique<NeonConvolution2dWorkload>(descriptor,
                                                       info,
                                                       m_MemoryManager->GetIntraLayerManager(),
                                                       isFastMathEnabled);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonDepthwiseConvolutionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateDetectionPostProcess(
    const armnn::DetectionPostProcessQueueDescriptor& descriptor, const armnn::WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateDivision(
    const DivisionQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonDivisionWorkload>(descriptor, info);
}

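// Dispatches each unary operation to its dedicated Neon workload; Abs and Rsqrt are repackaged into their own descriptors.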
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateElementwiseUnary(
    const ElementwiseUnaryQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonAbsWorkload>(absQueueDescriptor, info);
        }
        case UnaryOperation::Exp:
            return std::make_unique<NeonExpWorkload>(descriptor, info);
        case UnaryOperation::LogicalNot:
            return std::make_unique<NeonLogicalNotWorkload>(descriptor, info);
        case UnaryOperation::Log:
            return std::make_unique<NeonLogWorkload>(descriptor, info);
        case UnaryOperation::Neg:
            return std::make_unique<NeonNegWorkload>(descriptor, info);
        case UnaryOperation::Rsqrt:
        {
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<NeonRsqrtWorkload>(rsqrtQueueDescriptor, info);
        }
        case UnaryOperation::Sin:
            return std::make_unique<NeonSinWorkload>(descriptor, info);
        default:
            return nullptr;
    }
}

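// Equal (and Greater, below) are expressed through the generic Comparison workload.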
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Equal);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonFloorFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGather(const armnn::GatherQueueDescriptor& descriptor,
                                                             const armnn::WorkloadInfo& info) const
{
    return std::make_unique<NeonGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Greater);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<NeonInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonL2NormalizationFloatWorkload, NullWorkload>(descriptor, info,
                                                                              m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<NeonLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

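// Only LogicalAnd and LogicalOr are supported; any other logical binary operation yields a null workload.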
std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case LogicalBinaryOperation::LogicalAnd:
            return std::make_unique<NeonLogicalAndWorkload>(descriptor, info);
        case LogicalBinaryOperation::LogicalOr:
            return std::make_unique<NeonLogicalOrWorkload>(descriptor, info);
        default:
            return nullptr;
    }
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonLstmFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonMeanWorkload>(descriptor, info);
}

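// MemCopy and MemImport require a valid input handle; a null input is rejected up front.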
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemCopy workload");
    }

    return MakeWorkloadHelper<CopyMemGenericWorkload, CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("NeonWorkloadFactory: Invalid null input for MemImport workload");
    }

    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonMinimumWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateMultiplication(
    const MultiplicationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateNormalization(
    const NormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NeonNormalizationFloatWorkload, NullWorkload>(descriptor, info,
                                                                            m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<NeonPadWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonPermuteWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    return std::make_unique<NeonPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreatePrelu(const armnn::PreluQueueDescriptor &descriptor,
                                                                   const armnn::WorkloadInfo &info) const
{
    return std::make_unique<NeonPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonQLstmWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<NeonQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<NeonQuantizedLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<NeonRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<NeonReduceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<NeonResizeWorkload>(descriptor, info);
}

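// ResizeBilinear is mapped onto the generic Resize workload by copying its parameters across.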
std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateResizeBilinear(
    const ResizeBilinearQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Inputs  = descriptor.m_Inputs;
    resizeDescriptor.m_Outputs = descriptor.m_Outputs;

    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor &descriptor,
                                                            const WorkloadInfo &info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<NeonSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<NeonSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<NeonSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<NeonStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<NeonStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateSubtraction(
    const SubtractionQueueDescriptor& descriptor, const WorkloadInfo& info) const
{
    return std::make_unique<NeonSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<armnn::IWorkload> NeonWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                                       const WorkloadInfo& info) const
{
    return std::make_unique<NeonTransposeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> NeonWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor &descriptor,
    const WorkloadInfo &info) const
{
    return std::make_unique<NeonTransposeConvolution2dWorkload>(descriptor, info,
                                                                m_MemoryManager->GetIntraLayerManager());
}

} // namespace armnn