//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/TensorHandle.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
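// Picks the Float32 or Uint8 workload type that matches the data type recorded in the WorkloadInfo;
// every slot passed to MakeWorkloadHelper as NullWorkload is a data type this helper does not handle.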
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}

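// Returns true if any input or output tensor in 'info' has the given data type; for example,
// IsDataType<DataType::Float16>(info) is what the IsFloat16 helper below evaluates.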
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool RefWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported,
                                          const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

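// Illustrative usage sketch (not part of this file; the tensor shape below is hypothetical):
//     RefWorkloadFactory factory;
//     TensorInfo inputInfo({1, 2, 2, 3}, DataType::Float32);
//     auto handle = factory.CreateTensorHandle(inputInfo, true);   // backed by RefMemoryManager
// Workloads are then created from the matching Create*() call with a queue descriptor and WorkloadInfo.
// Legacy-style entry points below (CreateAbs, CreateEqual, CreateGreater, CreateRsqrt,
// CreateResizeBilinear) simply forward to the newer generic workloads.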
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

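// The arithmetic entry points (Addition, Division, Maximum, Minimum, Multiplication, Subtraction)
// pick an int32_t or float specialisation based on the first input tensor's data type.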
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefCastWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

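// Debug workloads are selected per data type so the debug output matches the tensor's element type;
// anything not matched explicitly falls back to the Float32/QAsymmU8 MakeWorkload helper.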
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

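// LogicalNot is routed to RefLogicalUnaryWorkload; every other UnaryOperation is handled by the
// generic RefElementwiseUnaryWorkload.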
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    if (descriptor.m_Parameters.m_Operation == UnaryOperation::LogicalNot)
    {
        return std::make_unique<RefLogicalUnaryWorkload>(descriptor, info);
    }
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

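// Input and Output are realised as plain memory copies; the byte-count checks below guard against
// mismatched tensor descriptions before the copy workload is created.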
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefLogicalBinaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

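// Permute (and Transpose further below) select a workload specialised for the tensor's data type;
// types without an explicit branch fall through to MakeWorkloadHelper.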
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefReduceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

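// CreateResizeBilinear forwards to CreateResize with ResizeMethod::Bilinear, preserving the original
// target size and data layout.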
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateShape(const ShapeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefShapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

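// Same per-data-type selection as CreatePermute above.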
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateUnidirectionalSequenceLstm(
    const UnidirectionalSequenceLstmQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefUnidirectionalSequenceLstmWorkload>(descriptor, info);
}

} // namespace armnn