//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

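// A hedged note on the helper above: as far as I can tell, MakeWorkloadHelper selects the template
// argument matching the DataType of the first input tensor (or the first output when there are no
// inputs), and a NullWorkload slot yields an empty pointer for unsupported types. Illustrative
// sketch only, with hypothetical RefFoo* workload names:
//
//     // Float32 tensors -> RefFooFloat32Workload, QAsymmU8 tensors -> RefFooUint8Workload,
//     // any other data type -> nullptr (NullWorkload).
//     std::unique_ptr<IWorkload> workload =
//         MakeWorkload<RefFooFloat32Workload, RefFooUint8Workload>(descriptor, info);
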
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

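// Usage sketch for IsDataType and the thin wrappers below: factory methods further down, such as
// CreateDebug, CreatePad and CreatePermute, branch on the tensor data type like this
// (illustrative only):
//
//     if (IsDataType<DataType::Float16>(info))
//     {
//         // pick the Float16 specialisation of the workload
//     }
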
bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

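// Hedged usage sketch for the handle factories above (illustrative; the Import call assumes the
// generic ITensorHandle::Import(void*, MemorySource) interface):
//
//     RefWorkloadFactory factory;
//     auto handle = factory.CreateTensorHandle(tensorInfo, DataLayout::NCHW, /*isMemoryManaged=*/true);
//     // The runtime either lets the shared RefMemoryManager back the handle, or, because
//     // RefTensorHandle can also wrap unmanaged memory, imports an external buffer:
//     // handle->Import(externalBuffer, MemorySource::Malloc);
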
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefDivisionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(
    const FakeQuantizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefFloorWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMinimumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPadQSymm16Workload>(descriptor, info);
    }
    else if (IsFloat16(info))
    {
        return std::make_unique<RefPadFloat16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPadBFloat16Workload>(descriptor, info);
    }
    return MakeWorkload<RefPadFloat32Workload, RefPadQAsymm8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<RefSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn