//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/TensorHandle.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
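// Helper that picks between the Float32 and QAsymmU8 specialisations of a workload based on the
// data type recorded in the WorkloadInfo; any other data type resolves to NullWorkload.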
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
           (descriptor, info);
}

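// Returns true if any input or output tensor described by the WorkloadInfo has the given data type.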
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

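// Convenience wrappers around IsDataType for the data types the reference workloads distinguish.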
bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool RefWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported,
                                          const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

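// The reference arithmetic workloads are templated on the element type: Signed32 tensors use the
// int32_t instantiation, every other supported type goes through the float path.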
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefCastWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefChannelShuffleWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution3d(const Convolution3dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution3dWorkload>(descriptor, info);
}

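// The debug workload is specialised on the tensor data type: pick the variant that matches the
// workload's tensors, falling back to the Float32/QAsymmU8 helper for anything not listed.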
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

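// LogicalNot has a dedicated logical workload; every other unary operation is handled by the
// generic elementwise unary workload.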
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    if (descriptor.m_Parameters.m_Operation == UnaryOperation::LogicalNot)
    {
        return std::make_unique<RefLogicalUnaryWorkload>(descriptor, info);
    }
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

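// Floor is only created for non-quantized inputs; returning nullptr signals that no workload is
// available for quantized types.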
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

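// Input and Output workloads copy between the user-provided tensor and the backend tensor handle,
// so both sides must describe the same number of bytes.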
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefLogicalBinaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

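// Permute (and Transpose below) need a workload instantiated for the tensor element type; the
// explicit checks cover the types the MakeWorkloadHelper call does not.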
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

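// The reference backend does not support pre-compiled workloads, so none is created here.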
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefReduceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateShape(const ShapeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefShapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateUnidirectionalSequenceLstm(
    const UnidirectionalSequenceLstmQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefUnidirectionalSequenceLstmWorkload>(descriptor, info);
}

} // namespace armnn