//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <backendsCommon/TensorHandle.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
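// Builds a workload for the given queue descriptor, selecting between the Float32 and
// Uint8 workload types according to the tensor data type recorded in the WorkloadInfo;
// data types without a specialisation here resolve to NullWorkload (see MakeWorkloadHelper).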
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
           (descriptor, info);
}

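// Returns true if any input or output tensor described by the WorkloadInfo has the
// requested data type.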
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}
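
// Example usage (illustrative sketch only; it assumes the single-argument
// CreateTensorHandle call is valid because the overload declared in RefWorkloadFactory.hpp
// defaults isMemoryManaged):
//
//     armnn::RefWorkloadFactory factory;
//     armnn::TensorInfo tensorInfo({1, 4}, armnn::DataType::Float32);
//     auto handle = factory.CreateTensorHandle(tensorInfo);
//     // ...then populate a QueueDescriptor/WorkloadInfo pair and call the matching
//     // Create* method below. In practice the runtime drives this when a network is
//     // loaded onto the CpuRef backend.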

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

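// Both IsLayerSupported overloads forward to the generic IWorkloadFactory check,
// qualified with the reference backend's id (s_Id).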
bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool RefWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported,
                                          const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

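// Both CreateTensorHandle overloads return a RefTensorHandle backed by this factory's
// memory manager; the isMemoryManaged and dataLayout arguments are deliberately ignored
// because the reference tensor handle can also wrap unmanaged memory.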
std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

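// CreateAbs is a legacy entry point: it rewrites the request as an ElementwiseUnary
// workload with the Abs operation. CreateEqual, CreateGreater and CreateRsqrt below
// follow the same pattern for their unified replacements.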
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

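// The arithmetic creators (Addition, Division, Maximum, Minimum, Multiplication,
// Subtraction) pick an int32_t or float instantiation of the workload template based on
// the data type of the first input tensor.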
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefCastWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefChannelShuffleWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

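// CreateDebug selects the debug workload whose element type matches the tensors in the
// WorkloadInfo, falling back to the Float32/QAsymmU8 MakeWorkload helper.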
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

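// LogicalNot is handled by a dedicated boolean workload; every other unary operation
// goes through the generic elementwise-unary reference workload.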
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    if (descriptor.m_Parameters.m_Operation == UnaryOperation::LogicalNot)
    {
        return std::make_unique<RefLogicalUnaryWorkload>(descriptor, info);
    }
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

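// Floor is only provided for non-quantized data; returning nullptr indicates that no
// reference workload is available for quantized inputs.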
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

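// Input and Output workloads are plain memory copies between the caller-visible tensor
// and the backend tensor handle, so both sides must describe the same number of bytes.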
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefLogicalBinaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

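// Permute (and Transpose further down) dispatch explicitly for QSymmS16, BFloat16 and
// QAsymmS8 tensors, and rely on MakeWorkloadHelper for the Float16, Float32 and
// QAsymmU8 cases.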
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefReduceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

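// CreateResizeBilinear is another legacy entry point: it converts its descriptor into a
// ResizeQueueDescriptor with ResizeMethod::Bilinear and forwards to CreateResize.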
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method       = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateShape(const ShapeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefShapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateUnidirectionalSequenceLstm(
    const UnidirectionalSequenceLstmQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefUnidirectionalSequenceLstmWorkload>(descriptor, info);
}

} // namespace armnn