//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
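
// Selects F32Workload or U8Workload according to the data type recorded in the
// WorkloadInfo; every other data type falls back to NullWorkload.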
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

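// Returns true if any input or output tensor described by the WorkloadInfo has the
// given data type.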
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

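// Convenience wrappers over IsDataType<>, used by the Create* functions below to pick
// a workload specialised for the tensors' data type.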
bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

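// A minimal sketch of how a caller typically drives this factory; populating the
// descriptor and WorkloadInfo (tensor handles and matching TensorInfos) is assumed to
// have been done by the caller and is elided here:
//
//     RefWorkloadFactory factory;
//     ActivationQueueDescriptor activationDescriptor;
//     WorkloadInfo workloadInfo;
//     // ... fill activationDescriptor.m_Inputs / m_Outputs and the matching
//     //     workloadInfo.m_InputTensorInfos / m_OutputTensorInfos ...
//     auto workload = factory.CreateActivation(activationDescriptor, workloadInfo);
//     workload->Execute();
//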
std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

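// Abs has no parameters of its own; it is lowered onto the generic ElementwiseUnary
// workload and the Abs-specific descriptor is ignored.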
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

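// Debug has a dedicated workload per data type, so the dispatch is written out
// explicitly instead of going through MakeWorkload.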
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefDivisionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

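// Equal (and Greater, below) are lowered onto the generic Comparison workload; the
// dedicated descriptors carry no extra parameters and are ignored.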
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(
    const FakeQuantizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefFloorWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

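// Input and Output workloads copy between the caller's tensor handle and the backend's
// own handle, so both sides must agree on byte count.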
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMinimumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPadQSymm16Workload>(descriptor, info);
    }
    else if (IsFloat16(info))
    {
        return std::make_unique<RefPadFloat16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPadBFloat16Workload>(descriptor, info);
    }
    return MakeWorkload<RefPadFloat32Workload, RefPadQAsymm8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

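// ResizeBilinear is handled by translating its descriptor into the generic Resize
// descriptor with ResizeMethod::Bilinear.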
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<RefSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn