//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <reference/workloads/RefFillWorkload.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

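// Returns true if any input or output tensor described by the WorkloadInfo has data type ArmnnType.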
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

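// Convenience wrappers around IsDataType<> for the data types the factory dispatches on.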
bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

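// Example (sketch, not part of the backend): a workload is typically obtained by filling in a
// queue descriptor and a WorkloadInfo and handing them to the factory, e.g.
//
//     RefWorkloadFactory factory;
//     ActivationQueueDescriptor descriptor;
//     WorkloadInfo info;
//     // ... populate descriptor.m_Inputs/m_Outputs and the corresponding tensor infos ...
//     std::unique_ptr<IWorkload> workload = factory.CreateActivation(descriptor, info);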
const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

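// CreateAbs (and CreateRsqrt further down) forwards to the generic ElementwiseUnary workload;
// CreateEqual and CreateGreater forward to the Comparison workload in the same way.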
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

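// The binary arithmetic workloads (Addition, Division, Maximum, Minimum, Multiplication and
// Subtraction) are templated on the element type: Signed32 inputs use the int32_t instantiation,
// every other supported type goes through the float instantiation.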
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

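// CreateDebug picks the debug workload matching the tensor data type; anything not handled
// explicitly falls through to the Float32/QAsymmU8 MakeWorkload call at the end.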
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefFloorWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

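// Input and Output layers on the reference backend are realised as generic memory copies; the
// checks below reject missing tensors and mismatched input/output buffer sizes.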
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

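// Permute (and Transpose further down) picks a per-data-type workload: QSymm16, BFloat16 and
// QAsymmS8 are handled explicitly, and the remaining types go through MakeWorkloadHelper.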
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

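// The reference backend has no pre-compiled workload support, so CreatePreCompiled returns nullptr.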
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

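// ResizeBilinear is expressed through the generic Resize workload with ResizeMethod::Bilinear.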
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn