//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "ClWorkloadFactory.hpp"
#include "ClBackendId.hpp"
#include "ClBackendModelContext.hpp"

#include <Layer.hpp>

#include <armnn/Exceptions.hpp>
#include <armnn/Utils.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>

#include <cl/ClTensorHandle.hpp>
#include <cl/workloads/ClWorkloads.hpp>
#include <cl/workloads/ClWorkloadUtils.hpp>

#include <arm_compute/core/CL/CLKernelLibrary.h>
#include <arm_compute/runtime/CL/CLBufferAllocator.h>
#include <arm_compute/runtime/CL/CLScheduler.h>

namespace armnn
{

namespace
{
static const BackendId s_Id{ClBackendId()};
}

bool ClWorkloadFactory::IsLayerSupported(const Layer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool ClWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported,
                                         const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

const BackendId& ClWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

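// Helper templates that construct the requested workload and translate any cl::Error thrown
// by the Compute Library into an armnn exception via WrapClError.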
template <typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return MakeWorkloadHelper<FloatWorkload, Uint8Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

template <typename Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return std::make_unique<Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

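// The factory keeps a reference to the backend's memory manager and, optionally, to the
// backend-specific model context (used below to query the fast-math option).
// Rough construction sketch; the CL backend normally sets this up, and ClMemoryManager's
// exact constructor arguments are an assumption here:
//     auto memoryManager = std::make_shared<ClMemoryManager>(std::make_unique<arm_compute::CLBufferAllocator>());
//     ClWorkloadFactory factory(memoryManager);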
ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
}

ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager,
                                     const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
}

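// Tensor handles created by the factory are registered with the memory manager's
// inter-layer memory group; the IsMemoryManaged flag is currently ignored.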
std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     DataLayout dataLayout,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo, dataLayout);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

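// Creates a view (sub-tensor) over a parent tensor. Returns nullptr when the Compute
// Library reports the requested shape/origin as an invalid sub-tensor of the parent.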
std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                        TensorShape const& subTensorShape,
                                                                        unsigned int const* subTensorOrigin) const
{
    arm_compute::Coordinates coords;
    arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<ClSubTensorHandle>(
        PolymorphicDowncast<IClTensorHandle*>(&parent), shape, coords);
}

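// Abs has no dedicated entry point of its own; it is expressed as an ElementwiseUnary
// workload with UnaryOperation::Abs.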
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                        const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Abs);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClActivationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<ClArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchNormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp32ToFp16Workload>(descriptor, info);
}

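// Convolution2d honours the backend's fast-math model option: when the model context is a
// ClBackendModelContext with fast math enabled, the flag is forwarded to the CL workload.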
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        if (m_ModelContextPtr.get() != nullptr)
        {
            auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
            if (modelOptions)
            {
                isFastMathEnabled = modelOptions->IsFastMathEnabled();
            }
        }
    }
    return MakeWorkload<ClConvolution2dWorkload>(descriptor,
                                                 info,
                                                 m_MemoryManager->GetIntraLayerManager(),
                                                 isFastMathEnabled);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthwiseConvolutionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClDivisionFloatWorkload, NullWorkload>(descriptor, info);
}

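// Dispatches on the unary operation. Abs and Rsqrt are lowered to their dedicated CL
// workloads (which take their own descriptor types); unsupported operations return nullptr.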
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClAbsWorkload>(absQueueDescriptor, info);
        }
        case UnaryOperation::Exp:
            return std::make_unique<ClExpWorkload>(descriptor, info);
        case UnaryOperation::Neg:
            return std::make_unique<ClNegWorkload>(descriptor, info);
        case UnaryOperation::Rsqrt:
        {
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClRsqrtWorkload>(rsqrtQueueDescriptor, info);
        }
        default:
            return nullptr;
    }
}

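// Equal (and Greater, below) are expressed as Comparison workloads with the corresponding
// ComparisonOperation.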
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Equal);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<ClFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClFloorFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Greater);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return MakeWorkload<ClL2NormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClLstmFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemCopy workload");
    }

    return MakeWorkload<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemImport workload");
    }

    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

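// Merger is the legacy name for Concat; forward to CreateConcat.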
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMinimumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClNormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                        const WorkloadInfo& info) const
{
    return MakeWorkload<ClPadWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClPermuteWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<ClQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizedLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClResizeWorkload>(descriptor, info);
}

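// ResizeBilinear is handled by the generic Resize workload with ResizeMethod::Bilinear.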
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Inputs  = descriptor.m_Inputs;
    resizeDescriptor.m_Outputs = descriptor.m_Outputs;

    resizeDescriptor.m_Parameters.m_Method       = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<ClSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<ClSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeConvolution2dWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

} // namespace armnn