//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "ClWorkloadFactory.hpp"
#include "ClBackendId.hpp"
#include "ClBackendModelContext.hpp"
#include "ClContextDeserializer.hpp"
#include "ClContextSerializer.hpp"

#include <Layer.hpp>

#include <armnn/Exceptions.hpp>
#include <armnn/Logging.hpp>
#include <armnn/Utils.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/MakeWorkloadHelper.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <backendsCommon/MemImportWorkload.hpp>
#include <armnn/backends/TensorHandle.hpp>

#include <cl/ClTensorHandle.hpp>
#include <cl/workloads/ClWorkloads.hpp>
#include <cl/workloads/ClWorkloadUtils.hpp>

#include <arm_compute/core/CL/CLKernelLibrary.h>
#include <arm_compute/runtime/CL/CLBufferAllocator.h>
#include <arm_compute/runtime/CL/CLScheduler.h>

#include <armnnUtils/Filesystem.hpp>
#include <fstream>
#include <sstream> // for std::stringstream used when caching the compiled context

#include <sys/stat.h>

namespace armnn
{

namespace
{
static const BackendId s_Id{ClBackendId()};
}

bool ClWorkloadFactory::IsLayerSupported(const Layer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported);
}

bool ClWorkloadFactory::IsLayerSupported(const IConnectableLayer& layer,
                                         Optional<DataType> dataType,
                                         std::string& outReasonIfUnsupported,
                                         const ModelOptions& modelOptions)
{
    return IWorkloadFactory::IsLayerSupported(s_Id, layer, dataType, outReasonIfUnsupported, modelOptions);
}

const BackendId& ClWorkloadFactory::GetBackendId() const
{
    return s_Id;
}

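// Called after all workloads for a network have been created. If the backend model
// options request a cached network, the compiled OpenCL programs held in
// m_CLCompileContext are serialized to the supplied file descriptor and/or the
// cached-network file path.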
void ClWorkloadFactory::AfterWorkloadsCreated()
{
    if (m_ModelContextPtr)
    {
        auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
        if (modelOptions->SaveCachedNetwork())
        {
            ClContextSerializer serializer;
            serializer.Serialize(m_CLCompileContext);
            auto cachedFd = modelOptions->GetCachedFileDescriptor();
            if (cachedFd != -1)
            {
                std::vector<uint8_t> compiledContextData;
                std::stringstream stream;
                bool serialized = serializer.SaveSerializedToStream(stream);
                if (serialized)
                {
                    std::string const serializedString{stream.str()};
                    std::copy(serializedString.begin(),
                              serializedString.end(),
                              std::back_inserter(compiledContextData));
                    auto success = write(cachedFd, compiledContextData.data(), compiledContextData.size());
                    if (success == -1)
                    {
                        ARMNN_LOG(info) << "ClWorkloadFactory:: Could not cache the compiled context!";
                    }
                }
            }

            // Save map to a filepath provided in ModelOptions
            auto filePath = modelOptions->GetCachedNetworkFilePath();
            if (filePath != "" && fs::exists(filePath) && fs::is_regular_file(filePath))
            {
                // Serialize ClContext to the file specified
                std::ofstream file(filePath, std::ios::out | std::ios::binary);
                serializer.SaveSerializedToStream(file);
            }
        }
    }
}

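// Workload construction helpers: any cl::Error thrown by the Compute Library while
// building a workload is rethrown as an armnn exception via WrapClError, tagged with
// the location from CHECK_LOCATION().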
template <typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return MakeWorkloadHelper<FloatWorkload, Uint8Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

template <typename Workload, typename QueueDescriptorType, typename... Args>
std::unique_ptr<IWorkload> ClWorkloadFactory::MakeWorkload(const QueueDescriptorType& descriptor,
                                                           const WorkloadInfo& info,
                                                           Args&&... args)
{
    try
    {
        return std::make_unique<Workload>(descriptor, info, std::forward<Args>(args)...);
    }
    catch (const cl::Error& clError)
    {
        throw WrapClError(clError, CHECK_LOCATION());
    }
}

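// Builds m_CLCompileContext from the default OpenCL context and device. When a cached
// network is being loaded (rather than saved), previously compiled OpenCL programs are
// deserialized from the cached file descriptor and/or the cached-network file path.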
void ClWorkloadFactory::InitializeCLCompileContext()
{
    // Initialize our m_CLCompileContext using default device and context
    auto context = arm_compute::CLKernelLibrary::get().context();
    auto device  = arm_compute::CLKernelLibrary::get().get_device();
    m_CLCompileContext = arm_compute::CLCompileContext(context, device);

    if (m_ModelContextPtr)
    {
        // Load saved programs if the user has set a filepath
        auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
        auto filePath = modelOptions->GetCachedNetworkFilePath();
        if (!(modelOptions->SaveCachedNetwork()))
        {
            ClContextDeserializer deserializer;
            auto cachedFd = modelOptions->GetCachedFileDescriptor();
            if (cachedFd != -1)
            {
                struct stat statBuffer;
                if (fstat(cachedFd, &statBuffer) == 0)
                {
                    long dataSize = static_cast<long>(statBuffer.st_size);
                    if (dataSize > 0)
                    {
                        auto offset = lseek(cachedFd, 0, SEEK_CUR);
                        if (offset == 0)
                        {
                            std::vector<uint8_t> compiledContextData(static_cast<unsigned int>(dataSize));
                            auto success = pread(cachedFd, compiledContextData.data(), compiledContextData.size(), 0);
                            if (success != -1)
                            {
                                deserializer.DeserializeFromBinary(m_CLCompileContext,
                                                                   context,
                                                                   device,
                                                                   compiledContextData);
                            }
                        }
                    }
                }
            }

            if (filePath != "" && fs::exists(filePath) && fs::is_regular_file(filePath))
            {
                // Deserialize binary file and load into m_CLCompileContext
                deserializer.Deserialize(m_CLCompileContext, context, device, filePath);
            }
        }
    }
}

ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(IBackendInternal::IBackendSpecificModelContextPtr{})
{
    InitializeCLCompileContext();
}

ClWorkloadFactory::ClWorkloadFactory(const std::shared_ptr<ClMemoryManager>& memoryManager,
                                     const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_MemoryManager(memoryManager), m_ModelContextPtr(modelContextPtr)
{
    InitializeCLCompileContext();
}

std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateTensorHandle(const TensorInfo& tensorInfo,
                                                                     DataLayout dataLayout,
                                                                     const bool IsMemoryManaged) const
{
    IgnoreUnused(IsMemoryManaged);
    std::unique_ptr<ClTensorHandle> tensorHandle = std::make_unique<ClTensorHandle>(tensorInfo, dataLayout);
    tensorHandle->SetMemoryGroup(m_MemoryManager->GetInterLayerMemoryGroup());

    return tensorHandle;
}

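// Creates a sub-tensor view into a parent CL tensor. The Compute Library stores tensor
// coordinates in reverse order, so the origin is flipped before being validated against
// the parent shape; nullptr is returned if the sub-tensor is invalid.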
std::unique_ptr<ITensorHandle> ClWorkloadFactory::CreateSubTensorHandle(ITensorHandle& parent,
                                                                        TensorShape const& subTensorShape,
                                                                        unsigned int const* subTensorOrigin) const
{
    arm_compute::Coordinates coords;
    arm_compute::TensorShape shape = armcomputetensorutils::BuildArmComputeTensorShape(subTensorShape);

    coords.set_num_dimensions(subTensorShape.GetNumDimensions());
    for (unsigned int i = 0; i < subTensorShape.GetNumDimensions(); i++)
    {
        // Arm compute indexes tensor coords in reverse order.
        unsigned int revertedIndex = subTensorShape.GetNumDimensions() - i - 1;
        coords.set(i, armnn::numeric_cast<int>(subTensorOrigin[revertedIndex]));
    }

    const arm_compute::TensorShape parentShape = armcomputetensorutils::BuildArmComputeTensorShape(parent.GetShape());
    if (!::arm_compute::error_on_invalid_subtensor(__func__, __FILE__, __LINE__, parentShape, coords, shape))
    {
        return nullptr;
    }

    return std::make_unique<ClSubTensorHandle>(
        PolymorphicDowncast<IClTensorHandle*>(&parent), shape, coords);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClActivationWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClAdditionWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return std::make_unique<ClArgMinMaxWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchNormalizationFloatWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClBatchToSpaceNdWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateCast(const CastQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClCastWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClChannelShuffleWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClComparisonWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClConcatWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClConstantWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp16ToFp32(
    const ConvertFp16ToFp32QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp16ToFp32Workload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvertFp32ToFp16(
    const ConvertFp32ToFp16QueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClConvertFp32ToFp16Workload>(descriptor, info, m_CLCompileContext);
}

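// The convolution factories consult the backend model options so that the user's
// FastMathEnabled setting is forwarded to the Compute Library workload, which may then
// choose faster (potentially lower-precision) kernels.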
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        if (m_ModelContextPtr.get() != nullptr)
        {
            auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
            if (modelOptions)
            {
                isFastMathEnabled = modelOptions->IsFastMathEnabled();
            }
        }
    }
    return MakeWorkload<ClConvolution2dWorkload>(descriptor,
                                                 info,
                                                 m_MemoryManager->GetIntraLayerManager(),
                                                 m_CLCompileContext,
                                                 isFastMathEnabled);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateConvolution3d(const Convolution3dQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    bool isFastMathEnabled = false;
    if (m_ModelContextPtr)
    {
        if (m_ModelContextPtr.get() != nullptr)
        {
            auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
            if (modelOptions)
            {
                isFastMathEnabled = modelOptions->IsFastMathEnabled();
            }
        }
    }
    return MakeWorkload<ClConvolution3dWorkload>(descriptor,
                                                 info,
                                                 m_MemoryManager->GetIntraLayerManager(),
                                                 m_CLCompileContext,
                                                 isFastMathEnabled);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthToSpaceWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClDepthwiseConvolutionWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDequantize(const DequantizeQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClDequantizeWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return std::make_unique<ClDivisionWorkload>(descriptor, info, m_CLCompileContext);
}

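// Elementwise unary operations are dispatched to dedicated CL workloads. Abs and Rsqrt
// still take their own queue descriptor types, so the inputs and outputs are copied into
// those descriptors first; operations without a CL workload return nullptr.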
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& descriptor,
                                                                     const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case UnaryOperation::Abs:
        {
            AbsQueueDescriptor absQueueDescriptor;
            absQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            absQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClAbsWorkload>(absQueueDescriptor, info, m_CLCompileContext);
        }
        case UnaryOperation::Exp:
            return std::make_unique<ClExpWorkload>(descriptor, info, m_CLCompileContext);
        case UnaryOperation::Log:
            return std::make_unique<ClLogWorkload>(descriptor, info, m_CLCompileContext);
        case UnaryOperation::LogicalNot:
            return std::make_unique<ClLogicalNotWorkload>(descriptor, info, m_CLCompileContext);
        case UnaryOperation::Neg:
            return std::make_unique<ClNegWorkload>(descriptor, info, m_CLCompileContext);
        case UnaryOperation::Rsqrt:
        {
            RsqrtQueueDescriptor rsqrtQueueDescriptor;
            rsqrtQueueDescriptor.m_Inputs  = descriptor.m_Inputs;
            rsqrtQueueDescriptor.m_Outputs = descriptor.m_Outputs;

            return std::make_unique<ClRsqrtWorkload>(rsqrtQueueDescriptor, info, m_CLCompileContext);
        }
        case UnaryOperation::Sin:
            return std::make_unique<ClSinWorkload>(descriptor, info, m_CLCompileContext);
        default:
            return nullptr;
    }
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFill(const FillQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<ClFillWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClFloorFloatWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClFullyConnectedWorkload>(descriptor,
                                                  info,
                                                  m_MemoryManager->GetIntraLayerManager(),
                                                  m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClGatherWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInput(const InputQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClInstanceNormalizationWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
{
    return MakeWorkload<ClL2NormalizationFloatWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

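// Only LogicalAnd and LogicalOr have CL workloads; any other logical binary operation
// returns nullptr.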
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    switch(descriptor.m_Parameters.m_Operation)
    {
        case LogicalBinaryOperation::LogicalAnd:
            return std::make_unique<ClLogicalAndWorkload>(descriptor, info, m_CLCompileContext);
        case LogicalBinaryOperation::LogicalOr:
            return std::make_unique<ClLogicalOrWorkload>(descriptor, info, m_CLCompileContext);
        default:
            return nullptr;
    }
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
{
    return MakeWorkload<ClLogSoftmaxWorkload>(descriptor,
                                              info,
                                              m_MemoryManager->GetIntraLayerManager(),
                                              m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClLstmFloatWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMaximumWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return MakeWorkload<ClMeanWorkload>(descriptor, info, m_CLCompileContext);
}

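// MemCopy/MemImport workloads move tensors across backend boundaries. Both require at
// least one non-null input tensor handle and throw InvalidArgumentException otherwise.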
std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemCopy workload");
    }

    return MakeWorkload<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    if (descriptor.m_Inputs.empty() || !descriptor.m_Inputs[0])
    {
        throw InvalidArgumentException("ClWorkloadFactory: Invalid null input for MemImport workload");
    }

    return std::make_unique<ImportMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClMinimumWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClMultiplicationWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClNormalizationFloatWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<CopyMemGenericWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
                                                        const WorkloadInfo& info) const
{
    return MakeWorkload<ClPadWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClPermuteWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClPooling2dWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<NullWorkload, NullWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClPreluWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return std::make_unique<ClQLstmWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizeWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
                                                                  const WorkloadInfo& info) const
{
    return MakeWorkload<ClQuantizedLstmWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateRank(const RankQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
{
    return std::make_unique<ClRankWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return std::make_unique<ClReduceWorkload>(descriptor, info);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return MakeWorkload<ClReshapeWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
                                                           const WorkloadInfo& info) const
{
    return MakeWorkload<ClResizeWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClSliceWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    return std::make_unique<ClSoftmaxWorkload>(descriptor,
                                               info,
                                               m_MemoryManager->GetIntraLayerManager(),
                                               m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
                                                                   const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToBatchNdWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClSpaceToDepthWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
                                                             const WorkloadInfo& info) const
{
    return MakeWorkload<ClSplitterWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
                                                          const WorkloadInfo& info) const
{
    return MakeWorkload<ClStackWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
                                                                 const WorkloadInfo& info) const
{
    return MakeWorkload<ClStridedSliceWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
                                                                const WorkloadInfo& info) const
{
    return MakeWorkload<ClSubtractionWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeWorkload>(descriptor, info, m_CLCompileContext);
}

std::unique_ptr<IWorkload> ClWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& descriptor,
    const WorkloadInfo& info) const
{
    return MakeWorkload<ClTransposeConvolution2dWorkload>(descriptor,
                                                          info,
                                                          m_MemoryManager->GetIntraLayerManager(),
                                                          m_CLCompileContext);
}

} // namespace armnn