blob: 792bd7d3ad57d655a3f11b9bfbf58ef714a7c732 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
#include <Layer.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <backendsCommon/MakeWorkloadHelper.hpp>
#include "RefWorkloadFactory.hpp"
#include "RefBackendId.hpp"
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"

#include <algorithm>
#include <memory>
telsoa014fcda012018-03-09 14:13:49 +000014
telsoa014fcda012018-03-09 14:13:49 +000015
16namespace armnn
17{
18
David Beck79141b92018-10-23 16:09:36 +010019namespace
20{
21static const BackendId s_Id{RefBackendId()};
22}
// Helper for layers that only have Float32 and QAsymm8 reference
// implementations: every other data type slot is filled with NullWorkload,
// which signals "unsupported" for this layer.
template <typename F32Workload, typename U8Workload, typename QueueDescriptorType>
std::unique_ptr<IWorkload> RefWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkloadHelper<NullWorkload, F32Workload, U8Workload, NullWorkload, NullWorkload, NullWorkload>
        (descriptor, info);
}
30
Ferran Balaguerd73d14f2019-06-10 10:29:54 +010031template <DataType ArmnnType>
32bool IsDataType(const WorkloadInfo& info)
Jim Flynn82fbe7c2019-04-02 15:19:08 +010033{
Ferran Balaguerd73d14f2019-06-10 10:29:54 +010034 auto checkType = [](const TensorInfo& tensorInfo) {return tensorInfo.GetDataType() == ArmnnType;};
35 auto it = std::find_if(std::begin(info.m_InputTensorInfos), std::end(info.m_InputTensorInfos), checkType);
Jim Flynn82fbe7c2019-04-02 15:19:08 +010036 if (it != std::end(info.m_InputTensorInfos))
37 {
38 return true;
39 }
Ferran Balaguerd73d14f2019-06-10 10:29:54 +010040 it = std::find_if(std::begin(info.m_OutputTensorInfos), std::end(info.m_OutputTensorInfos), checkType);
Jim Flynn82fbe7c2019-04-02 15:19:08 +010041 if (it != std::end(info.m_OutputTensorInfos))
42 {
43 return true;
44 }
45 return false;
46}
47
// Convenience wrapper: true when any tensor in 'info' is Float16.
bool IsFloat16(const WorkloadInfo& info)
{
    return IsDataType<DataType::Float16>(info);
}

// Convenience wrapper: true when any tensor in 'info' is QSymmS16.
bool IsQSymm16(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS16>(info);
}

// Convenience wrapper: true when any tensor in 'info' is QSymmS8.
bool IsQSymm8(const WorkloadInfo& info)
{
    return IsDataType<DataType::QSymmS8>(info);
}
62
// Construct a factory that shares an externally owned memory manager.
RefWorkloadFactory::RefWorkloadFactory(const std::shared_ptr<RefMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{
}
67
telsoa01c577f2c2018-08-31 09:22:23 +010068RefWorkloadFactory::RefWorkloadFactory()
Matthew Bentham7c1603a2019-06-21 17:22:23 +010069 : m_MemoryManager(new RefMemoryManager())
telsoa014fcda012018-03-09 14:13:49 +000070{
71}
72
// Returns the statically registered identifier of the reference backend.
const BackendId& RefWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
77
// Delegates the support query to the common IWorkloadFactory implementation,
// keyed on this backend's id.
bool RefWorkloadFactory::IsLayerSupported(const Layer& layer,
                                          Optional<DataType> dataType,
                                          std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}
84
std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    boost::ignore_unused(isMemoryManaged);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}

std::unique_ptr<ITensorHandle> RefWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                      DataLayout dataLayout,
                                                                      const bool isMemoryManaged) const
{
    // For Ref it is okay to make the TensorHandle memory managed as it can also store a pointer
    // to unmanaged memory. This also ensures memory alignment.
    // dataLayout is not needed to construct a RefTensorHandle, so it is ignored here.
    boost::ignore_unused(isMemoryManaged, dataLayout);
    return std::make_unique<RefTensorHandle>(tensorInfo, m_MemoryManager);
}
103
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100104std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
105 const WorkloadInfo& info) const
106{
josh minor4a3c6102020-01-06 16:40:46 -0600107 boost::ignore_unused(descriptor);
108 ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
109 elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Abs;
110
111 return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100112}
113
// The factory methods below are one-line pass-throughs: each instantiates the
// reference (CPU) workload implementing the corresponding layer.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefActivationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefAdditionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefArgMinMaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefBatchToSpaceNdWorkload>(descriptor, info);
}
144
// Comparison is the generic target that CreateEqual/CreateGreater forward to.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefComparisonWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefConcatWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefConstantWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp16ToFp32Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefConvertFp32ToFp16Workload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefConvolution2dWorkload>(descriptor, info);
}
182
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    // Debug workloads are specialised per tensor data type: test the explicit
    // specialisations first, then fall back to the Float32/QAsymm8 helper.
    if (IsFloat16(info))
    {
        return std::make_unique<RefDebugFloat16Workload>(descriptor, info);
    }
    if (IsQSymm16(info))
    {
        return std::make_unique<RefDebugQSymm16Workload>(descriptor, info);
    }
    if (IsQSymm8(info))
    {
        return std::make_unique<RefDebugQSymm8Workload>(descriptor, info);
    }
    if (IsDataType<DataType::Signed32>(info))
    {
        return std::make_unique<RefDebugSigned32Workload>(descriptor, info);
    }

    return MakeWorkload<RefDebugFloat32Workload, RefDebugQAsymm8Workload>(descriptor, info);
}
205
// Simple pass-throughs to the corresponding reference workloads.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthToSpaceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDepthwiseConvolution2dWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefDequantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefDetectionPostProcessWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefDivisionWorkload>(descriptor, info);
}

// Generic target that CreateAbs/CreateRsqrt forward to.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                      const WorkloadInfo& info) const
{
    return std::make_unique<RefElementwiseUnaryWorkload>(descriptor, info);
}
243
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100244std::unique_ptr<IWorkload> RefWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
245 const WorkloadInfo& info) const
246{
Derek Lamberti901ea112019-12-10 22:07:09 +0000247 boost::ignore_unused(descriptor);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100248 ComparisonQueueDescriptor comparisonDescriptor;
249 comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Equal;
250
251 return CreateComparison(comparisonDescriptor, info);
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100252}
253
// FakeQuantization only has a Float32 reference implementation; all other
// data types resolve to NullWorkload via the MakeWorkload helper.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFakeQuantization(
    const FakeQuantizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<RefFakeQuantizationFloat32Workload, NullWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefFloorWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateFullyConnected(
    const FullyConnectedQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefFullyConnectedWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefGatherWorkload>(descriptor, info);
}
279
280std::unique_ptr<IWorkload> RefWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
281 const WorkloadInfo& info) const
282{
Derek Lamberti901ea112019-12-10 22:07:09 +0000283 boost::ignore_unused(descriptor);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100284 ComparisonQueueDescriptor comparisonDescriptor;
285 comparisonDescriptor.m_Parameters.m_Operation = ComparisonOperation::Greater;
286
287 return CreateComparison(comparisonDescriptor, info);
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100288}
289
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    // An Input workload is a generic memory copy, so both sides must exist
    // and the first input/output pair must have identical byte counts.
    if (info.m_InputTensorInfos.empty() )
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: Output cannot be zero length");
    }

    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateInput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}
309
// Simple pass-throughs to the corresponding reference workloads.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefInstanceNormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    return std::make_unique<RefL2NormalizationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return std::make_unique<RefLogSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefLstmWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMaximumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<RefMeanWorkload>(descriptor, info);
}
346
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    // A copy requires at least one source tensor.
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemCopy() expected an input tensor.");
    }
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    // An import requires at least one source tensor.
    if (descriptor.m_Inputs.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory: CreateMemImport() expected an input tensor.");
    }
    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}
366
// Merger is the legacy name for Concat; it simply forwards to CreateConcat.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return CreateConcat(descriptor, info);
}
372
// Simple pass-throughs to the corresponding reference workloads.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefMinimumWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefMultiplicationWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return std::make_unique<RefNormalizationWorkload>(descriptor, info);
}
390
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    // An Output workload is a generic memory copy, so both sides must exist
    // and the first input/output pair must have identical byte counts.
    if (info.m_InputTensorInfos.empty() )
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Input cannot be zero length");
    }
    if (info.m_OutputTensorInfos.empty())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: Output cannot be zero length");
    }
    if (info.m_InputTensorInfos[0].GetNumBytes() != info.m_OutputTensorInfos[0].GetNumBytes())
    {
        throw InvalidArgumentException("RefWorkloadFactory::CreateOutput: data input and output differ in byte count.");
    }

    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}
409
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100410std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
411 const WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000412{
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100413 if (IsQSymm16(info))
414 {
415 return std::make_unique<RefPadQSymm16Workload>(descriptor, info);
416 }
417 else if (IsFloat16(info))
418 {
419 return std::make_unique<RefPadFloat16Workload>(descriptor, info);
420 }
421 return MakeWorkload<RefPadFloat32Workload, RefPadQAsymm8Workload>(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000422}
423
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    // QSymm16 has its own specialisation; the remaining data types are
    // dispatched by MakeWorkloadHelper (Float16/Float32/QAsymm8 supported,
    // everything else maps to NullWorkload).
    if (IsQSymm16(info))
    {
        return std::make_unique<RefPermuteQSymm16Workload>(descriptor, info);
    }
    return MakeWorkloadHelper<RefPermuteFloat16Workload, RefPermuteFloat32Workload, RefPermuteQAsymm8Workload,
                              NullWorkload, NullWorkload, NullWorkload>(descriptor, info);
}
434
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return std::make_unique<RefPooling2dWorkload>(descriptor, info);
}

// Pre-compiled workloads are not produced by the reference backend, so this
// factory method always returns nullptr.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return nullptr;
}
446
// Simple pass-throughs to the corresponding reference workloads.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefPreluWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefQuantizeWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefReshapeWorkload>(descriptor, info);
}

// Generic target that CreateResizeBilinear forwards to.
std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<RefResizeWorkload>(descriptor, info);
}
470
telsoa014fcda012018-03-09 14:13:49 +0000471std::unique_ptr<IWorkload> RefWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
472 const WorkloadInfo& info) const
473{
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100474 ResizeQueueDescriptor resizeDescriptor;
475 resizeDescriptor.m_Parameters.m_Method = ResizeMethod::Bilinear;
476 resizeDescriptor.m_Parameters.m_DataLayout = descriptor.m_Parameters.m_DataLayout;
477 resizeDescriptor.m_Parameters.m_TargetWidth = descriptor.m_Parameters.m_TargetWidth;
478 resizeDescriptor.m_Parameters.m_TargetHeight = descriptor.m_Parameters.m_TargetHeight;
479
480 return CreateResize(resizeDescriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000481}
482
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000483std::unique_ptr<IWorkload> RefWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
484 const WorkloadInfo& info) const
485{
josh minor4a3c6102020-01-06 16:40:46 -0600486 boost::ignore_unused(descriptor);
487 ElementwiseUnaryQueueDescriptor elementwiseUnaryDescriptor;
488 elementwiseUnaryDescriptor.m_Parameters.m_Operation = UnaryOperation::Rsqrt;
489
490 return CreateElementwiseUnary(elementwiseUnaryDescriptor, info);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000491}
492
// Simple pass-throughs to the corresponding reference workloads.

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<RefSoftmaxWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToBatchNdWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefSpaceToDepthWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<RefSplitterWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<RefStackWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return std::make_unique<RefStridedSliceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return std::make_unique<RefSubtractionWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> RefWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return std::make_unique<RefTransposeConvolution2dWorkload>(descriptor, info);
}
547
Matteo Martincigh49124022019-01-11 13:25:59 +0000548} // namespace armnn