//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <reference/workloads/RefFillWorkload.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"


namespace armnn
{

namespace
{
static const BackendId s_Id{RefBackendId()};
}
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
           (descriptor, info);
}

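// Returns true if any input or output tensor in 'info' has the given data type,
// e.g. IsDataType<DataType::Float16>(info). The Is* helpers below are thin wrappers around it.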
template <DataType ArmnnType>
bool IsDataType(const WorkloadInfo& info)
{
    auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
    auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
    if (it != std::end(info.m_InputTensorInfos))
    {
        return true;
    }
    it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
    if (it != std::end(info.m_OutputTensorInfos))
    {
        return true;
    }
    return false;
}

bool IsSigned32(const WorkloadInfo& info)
{
    return IsDataType<DataType::Signed32>(info);
}

bool IsBFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::BFloat16>(info);
}

bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

bool IsQSymmS16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

bool IsQSymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}

bool IsQAsymmS8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmS8>(info);
}

bool IsQAsymmU8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QAsymmU8>(info);
}

RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}

RefWorkloadFactory::RefWorkloadFactory()
    : m_MemoryManager(new RefMemoryManager())
{
}

const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool RefWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported,
                                          const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    IgnoreUnused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

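// Abs is implemented via the generic elementwise-unary path: build an
// ElementwiseUnaryQueueDescriptor with UnaryOperation::Abs and delegate to CreateElementwiseUnary.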
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

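// The binary arithmetic workloads (Addition, Division, Maximum, Minimum, Multiplication,
// Subtraction) are templated on the compute type: Signed32 inputs select the int32_t
// instantiation, every other data type goes through the float instantiation.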
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefAdditionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefAdditionWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertBf16ToFp32(
    const ConvertBf16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertBf16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToBf16(
    const ConvertFp32ToBf16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToBf16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}

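// Debug workloads are selected per data type; Float32 and QAsymmU8 are handled by the
// MakeWorkload fallback at the end.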
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (IsBFloat16(info))
    {
        return std::make_unique<RefDebugBFloat16Workload>(descriptor, info);
    }
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefDebugQSymmS16Workload>(descriptor, info);
    }
    if (IsQSymmS8(info))
    {
        return std::make_unique<RefDebugQSymmS8Workload>(descriptor, info);
    }
    if (IsQAsymmU8(info))
    {
        return std::make_unique<RefDebugQAsymmU8Workload>(descriptor, info);
    }
    if (IsQAsymmS8(info))
    {
        return std::make_unique<RefDebugQAsymmS8Workload>(descriptor, info);
    }
    if (IsSigned32(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymmU8Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefDivisionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefDivisionWorkload<float>>(descriptor, info);
    }
}

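// LogicalNot is routed to the dedicated logical unary workload; all other unary
// operations share RefElementwiseUnaryWorkload.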
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    if (descriptor.m_Parameters.m_Operation == UnaryOperation::LogicalNot)
    {
        return std::make_unique<RefLogicalUnaryWorkload>(descriptor, info);
    }
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}

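// Equal (and Greater, below) are mapped onto the Comparison workload by building a
// ComparisonQueueDescriptor with the corresponding ComparisonOperation.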
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;

    return CreateComparison(comparisonDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefFillWorkload>(descriptor, info);
}

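// Floor has no quantized implementation here; for quantized input types no workload is returned.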
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if(IsQuantizedType(info.m_InputTensorInfos[0].GetDataType()))
    {
        return nullptr;
    }
    else
    {
        return std::make_unique<RefFloorWorkload>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ComparisonQueueDescriptor comparisonDescriptor;
    comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;

    return CreateComparison(comparisonDescriptor, info);
}

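// Input (and Output, below) are plain memory copies; both checks below ensure the bound
// input and output tensors exist and agree on the byte count before the copy workload is built.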
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefLogicalBinaryWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMaximumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMaximumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

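// Merger has been superseded by Concat; the descriptor is forwarded unchanged.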
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMinimumWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMinimumWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefMultiplicationWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefMultiplicationWorkload<float>>(descriptor, info);
    }
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<RefPadWorkload>(descriptor, info);
}

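// Permute (and Transpose, below) use data-type-specific workloads: QSymmS16, BFloat16 and
// QAsymmS8 are handled explicitly, the remaining types via MakeWorkloadHelper.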
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefPermuteBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefPermuteQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

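// Pre-compiled workloads are not supported by the reference backend, so nullptr is returned.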
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefQLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}

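// ResizeBilinear is expressed in terms of the generic Resize workload: the relevant
// parameters are copied into a ResizeQueueDescriptor with ResizeMethod::Bilinear.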
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    ResizeQueueDescriptor resizeDescriptor;
    resizeDescriptor.m_Parameters.m_Method       = ResizeMethod::Bilinear;
    resizeDescriptor.m_Parameters.m_DataLayout   = descriptor.m_Parameters.m_DataLayout;
    resizeDescriptor.m_Parameters.m_TargetWidth  = descriptor.m_Parameters.m_TargetWidth;
    resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;

    return CreateResize(resizeDescriptor, info);
}

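// Rsqrt follows the same pattern as Abs above: it is expressed as an elementwise-unary
// workload with UnaryOperation::Rsqrt.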
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    IgnoreUnused(descriptor);
    ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
    elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;

    return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    if (info.m_InputTensorInfos[0].GetDataType() == armnn::DataType::Signed32)
    {
        return std::make_unique<RefSubtractionWorkload<int32_t>>(descriptor, info);
    }
    else
    {
        return std::make_unique<RefSubtractionWorkload<float>>(descriptor, info);
    }
}

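// Transpose mirrors CreatePermute: explicit workloads for QSymmS16, BFloat16 and QAsymmS8,
// MakeWorkloadHelper for the remaining data types.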
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    if (IsQSymmS16(info))
    {
        return std::make_unique<RefTransposeQSymm16Workload>(descriptor, info);
    }
    else if (IsBFloat16(info))
    {
        return std::make_unique<RefTransposeBFloat16Workload>(descriptor, info);
    }
    else if (IsQAsymmS8(info))
    {
        return std::make_unique<RefTransposeQAsymmS8Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefTransposeFloat16Workload, RefTransposeFloat32Workload, RefTransposeQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}

} // namespace armnn