//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <reference/workloads/RefFillWorkload.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
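
// Helper that builds the Float32 or quantized-U8 workload matching the tensors in 'info';
// every other slot of MakeWorkloadHelper is NullWorkload, so other data types yield no workload.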
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

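// Returns true if any input or output tensor described in 'info' has the data type given as the template argument.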
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

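// Thin wrappers over IsDataType<> for the data types that the workload creation functions below dispatch on.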
bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

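// Both constructors give the factory a RefMemoryManager: either one shared with the caller or a private
// instance. Minimal usage sketch (the descriptor/info variable names are illustrative only):
//     RefWorkloadFactory factory;
//     auto workload = factory.CreateActivation(activationDescriptor, workloadInfo);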
RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool RefWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported,
                                          const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

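// Abs is handled by the generic elementwise-unary workload; the Abs-specific descriptor is ignored.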
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

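// The elementwise arithmetic workloads (Addition, Division, Maximum, Minimum, Multiplication, Subtraction)
// are templated on the compute type: int32_t when the first input is Signed32, float otherwise.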
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

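// Debug workloads are specialized per data type; anything not matched below falls back to
// MakeWorkload, which covers the Float32 and QAsymmU8 cases.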
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

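// Equal (and Greater further down) is expressed through the generic Comparison workload.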
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

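// Floor has no quantized implementation in the reference backend, so quantized inputs yield no workload.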
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if(IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

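// Input and Output workloads copy between the caller's memory and the backend tensor handle;
// both sides must describe the same number of bytes.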
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

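// Merger is the older name for Concat and simply forwards to it.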
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

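// Permute (and Transpose further down) picks a workload per data type; the MakeWorkloadHelper fallback
// covers the Float16, Float32 and QAsymmU8 cases.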
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
        NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

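// The reference backend does not produce pre-compiled workloads.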
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

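// ResizeBilinear is mapped onto the generic Resize workload with ResizeMethod::Bilinear.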
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

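// Rsqrt, like Abs above, is routed through the elementwise-unary workload.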
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
        NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn