//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <reference/workloads/RefFillWorkload.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

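// Returns true if any input or output tensor described by 'info' has the data type given by ArmnnType.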
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

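// Abs is implemented through the ElementwiseUnary layer: the descriptor is rebuilt with
// UnaryOperation::Abs and forwarded to CreateElementwiseUnary (Rsqrt follows the same pattern).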
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

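// The arithmetic workloads are templated on the element type: Signed32 inputs use the int32_t
// specialisation, every other data type falls back to the float implementation.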
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

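// Debug workloads are specialised per data type, so pick the variant matching the tensors in 'info'.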
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

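// Equal is implemented through the Comparison layer: the descriptor is rebuilt with
// ComparisonOperation::Equal and forwarded to CreateComparison (Greater follows the same pattern).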
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

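// Floor has no quantized implementation in the reference backend; returning nullptr signals that
// no workload could be created for quantized inputs.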
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

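// Input and Output workloads copy data between the caller's memory and the backend's tensor handle,
// so the descriptor is validated and a generic memory-copy workload is returned.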
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

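// Permute is specialised per data type; types not matched explicitly below are resolved by
// MakeWorkloadHelper, with unsupported ones mapping to NullWorkload. Transpose is handled the same way.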
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

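// The reference backend provides no pre-compiled workloads, so nullptr is returned.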
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

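// ResizeBilinear is mapped onto the newer Resize workload: the parameters are copied into a
// ResizeQueueDescriptor with ResizeMethod::Bilinear and forwarded to CreateResize.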
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method       = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn