//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "ClWorkloadFactory.hpp"
#include "ClBackendId.hpp"
#include "ClBackendModelContext.hpp"

#include <Layer.hpp>

#include <armnn/Exceptions.hpp>
#include <armnn/Utils.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>

#include <cl/ClTensorHandle.hpp>
#include <cl/workloads/ClWorkloads.hpp>
#include <cl/workloads/ClWorkloadUtils.hpp>

#include <arm_compute/core/CL/CLKernelLibrary.h>
#include <arm_compute/runtime/CL/CLBufferAllocator.h>
#include <arm_compute/runtime/CL/CLScheduler.h>

#include <boost/format.hpp>

namespace armnn
{

namespace
{
static const BackendId s_Id{ClBackendId()};
}

bool ClWorkloadFactory::IsLayerSupported(const Layer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool ClWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported,
                                         const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

const BackendId& ClWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

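// Both MakeWorkload overloads wrap workload construction so that any cl::Error thrown by the
// Compute Library while a workload is being configured is converted into an armnn exception
// carrying the source location (WrapClError / CHECK_LOCATION).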
template <typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return MakeWorkloadHelper<FloatWorkload, Uint8Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

template <typename Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return std::make_unique<Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

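// The factory stores the shared ClMemoryManager (used below for the intra- and inter-layer memory
// groups) and an optional backend-specific model context, which is later queried for model options
// such as FastMath.
//
// Minimal usage sketch, assuming the memory manager is set up the way ClBackend does it (with an
// arm_compute::CLBufferAllocator); names not defined in this file are illustrative only:
//
//     auto memoryManager = std::make_shared<ClMemoryManager>(
//         std::make_unique<arm_compute::CLBufferAllocator>());
//     ClWorkloadFactory factory(memoryManager);
//     std::unique_ptr<ITensorHandle> handle = factory.CreateTensorHandle(someTensorInfo);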
ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
}

ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager,
                                     const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
}

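// Tensor handles are ClTensorHandles registered with the memory manager's inter-layer memory group;
// the IsMemoryManaged flag is not used by this backend.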
std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     DataLayout dataLayout,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo, dataLayout);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

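// Sub-tensor handles are views into a parent CL tensor: the origin coordinates are converted into
// the Compute Library's reversed dimension order and validated against the parent shape, and
// nullptr is returned when the requested sub-tensor is invalid.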
std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                        TensorShape const& subTensorShape,
                                                                        unsigned int const* subTensorOrigin) const
{
    arm_compute::Coordinates coords;
    arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<ClSubTensorHandle>(
        PolymorphicDowncast<IClTensorHandle*>(&parent), shape, coords);
}

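// CreateAbs covers the older standalone Abs layer, which has been superseded by ElementwiseUnary:
// the request is repackaged into an ElementwiseUnaryQueueDescriptor with UnaryOperation::Abs
// (CreateRsqrt further below does the same for Rsqrt).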
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                        const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Abs);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClActivationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<ClArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchNormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
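    // The fast-math flag for Convolution2d is read from the backend-specific model options,
    // when a ClBackendModelContext was provided at construction time.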
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
        if (modelOptions)
        {
            isFastMathEnabled = modelOptions->IsFastMathEnabled();
        }
    }
    return MakeWorkload<ClConvolution2dWorkload>(descriptor,
                                                 info,
                                                 m_MemoryManager->GetIntraLayerManager(),
                                                 isFastMathEnabled);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthwiseConvolutionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClDivisionFloatWorkload, NullWorkload>(descriptor, info);
}

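// ElementwiseUnary dispatches on the requested operation: Abs and Rsqrt have dedicated workloads
// that take their own queue descriptors, Exp and Neg take the generic descriptor directly, and any
// other operation is unsupported here and yields nullptr.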
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClAbsWorkload>(absQueueDescriptor, info);
        }
        case UnaryOperation::Exp:
            return std::make_unique<ClExpWorkload>(descriptor, info);
        case UnaryOperation::Neg:
            return std::make_unique<ClNegWorkload>(descriptor, info);
        case UnaryOperation::Rsqrt:
        {
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClRsqrtWorkload>(rsqrtQueueDescriptor, info);
        }
        default:
            return nullptr;
    }
}

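// CreateEqual and CreateGreater (further below) cover the older standalone comparison layers;
// both repackage the request into a ComparisonQueueDescriptor and forward to CreateComparison.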
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Equal);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<ClFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClFloorFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClFullyConnectedWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters = ComparisonDescriptor(ComparisonOperation::Greater);

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return MakeWorkload<ClL2NormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClLogSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClLstmFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemCopy workload");
    }

    return MakeWorkload<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemImport workload");
    }

    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

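// Merger is the older name for Concat; the call simply forwards to CreateConcat.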
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMinimumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClNormalizationFloatWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                        const WorkloadInfo& info) const
{
    return MakeWorkload<ClPadWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClPermuteWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<ClQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizedLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClResizeWorkload>(descriptor, info);
}

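// ResizeBilinear is the older form of Resize: its parameters are copied into a ResizeQueueDescriptor
// with ResizeMethod::Bilinear and the call is forwarded to CreateResize.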
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Inputs  = descriptor.m_Inputs;
    resizeDescriptor.m_Outputs = descriptor.m_Outputs;

    resizeDescriptor.m_Parameters.m_Method       = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);

    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters = ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt);

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<ClSoftmaxWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<ClSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeConvolution2dWorkload>(descriptor, info, m_MemoryManager->GetIntraLayerManager());
}

} // namespace armnn