//
// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "ClLayerSupport.hpp"
#include "ClBackendId.hpp"
#include "ClBackendModelContext.hpp"

#include <armnn/BackendRegistry.hpp>

#include <InternalTypes.hpp>
#include <LayerSupportCommon.hpp>

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#if defined(ARMCOMPUTECL_ENABLED)
#include <aclCommon/ArmComputeUtils.hpp>
#include <aclCommon/ArmComputeTensorUtils.hpp>
#include "workloads/ClAbsWorkload.hpp"
#include "workloads/ClAdditionWorkload.hpp"
#include "workloads/ClActivationWorkload.hpp"
#include "workloads/ClArgMinMaxWorkload.hpp"
#include "workloads/ClBatchMatMulWorkload.hpp"
#include "workloads/ClBatchNormalizationFloatWorkload.hpp"
#include "workloads/ClBatchToSpaceNdWorkload.hpp"
#include "workloads/ClCastWorkload.hpp"
#include "workloads/ClChannelShuffleWorkload.hpp"
#include "workloads/ClComparisonWorkload.hpp"
#include "workloads/ClConstantWorkload.hpp"
#include "workloads/ClConvertFp16ToFp32Workload.hpp"
#include "workloads/ClConvertFp32ToFp16Workload.hpp"
#include "workloads/ClConvolution2dWorkload.hpp"
#include "workloads/ClConvolution3dWorkload.hpp"
#include "workloads/ClDepthToSpaceWorkload.hpp"
#include "workloads/ClDepthwiseConvolutionWorkload.hpp"
#include "workloads/ClDequantizeWorkload.hpp"
#include "workloads/ClDivisionWorkload.hpp"
#include "workloads/ClElementwiseBinaryWorkload.hpp"
#include "workloads/ClExpWorkload.hpp"
#include "workloads/ClFillWorkload.hpp"
#include "workloads/ClFloorFloatWorkload.hpp"
#include "workloads/ClFullyConnectedWorkload.hpp"
#include "workloads/ClGatherWorkload.hpp"
#include "workloads/ClGatherNdWorkload.hpp"
#include "workloads/ClInstanceNormalizationWorkload.hpp"
#include "workloads/ClL2NormalizationFloatWorkload.hpp"
#include "workloads/ClLogWorkload.hpp"
#include "workloads/ClLogSoftmaxWorkload.hpp"
#include "workloads/ClLogicalAndWorkload.hpp"
#include "workloads/ClLogicalNotWorkload.hpp"
#include "workloads/ClLogicalOrWorkload.hpp"
#include "workloads/ClLstmFloatWorkload.hpp"
#include "workloads/ClMaximumWorkload.hpp"
#include "workloads/ClMeanWorkload.hpp"
#include "workloads/ClConcatWorkload.hpp"
#include "workloads/ClMinimumWorkload.hpp"
#include "workloads/ClMultiplicationWorkload.hpp"
#include "workloads/ClNegWorkload.hpp"
#include "workloads/ClNormalizationFloatWorkload.hpp"
#include "workloads/ClPadWorkload.hpp"
#include "workloads/ClPermuteWorkload.hpp"
#include "workloads/ClPooling2dWorkload.hpp"
#include "workloads/ClPooling3dWorkload.hpp"
#include "workloads/ClPreluWorkload.hpp"
#include "workloads/ClQLstmWorkload.hpp"
#include "workloads/ClQuantizedLstmWorkload.hpp"
#include "workloads/ClQuantizeWorkload.hpp"
#include "workloads/ClReduceWorkload.hpp"
#include "workloads/ClReshapeWorkload.hpp"
#include "workloads/ClResizeWorkload.hpp"
#include "workloads/ClRsqrtWorkload.hpp"
#include "workloads/ClSinWorkload.hpp"
#include "workloads/ClSliceWorkload.hpp"
#include "workloads/ClSoftmaxWorkload.hpp"
#include "workloads/ClSpaceToBatchNdWorkload.hpp"
#include "workloads/ClSpaceToDepthWorkload.hpp"
#include "workloads/ClSplitterWorkload.hpp"
#include "workloads/ClSqrtWorkload.hpp"
#include "workloads/ClStackWorkload.hpp"
#include "workloads/ClStridedSliceWorkload.hpp"
#include "workloads/ClSubtractionWorkload.hpp"
#include "workloads/ClTileWorkload.hpp"
#include "workloads/ClTransposeConvolution2dWorkload.hpp"
#include "workloads/ClTransposeWorkload.hpp"
#include "workloads/ClUnidirectionalSequenceLstmFloatWorkload.hpp"
#endif


namespace armnn
{

namespace
{

template<unsigned int FilterSize>
bool IsMatchingSize2d(const TensorInfo& weightInfo)
{
    // Width & Height must match.
    return (weightInfo.GetShape()[3] == FilterSize) && (weightInfo.GetShape()[2] == FilterSize);
}

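// Helpers that check an actual stride value against a compile-time list of valid strides:
// the variadic overload recurses pairwise through the parameter pack.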
template<uint32_t ValidStride>
bool IsMatchingStride(uint32_t actualStride)
{
    return ValidStride == actualStride;
}

template<uint32_t FirstStride, uint32_t SecondStride, uint32_t... ValidStrides>
bool IsMatchingStride(uint32_t actualStride)
{
    return IsMatchingStride<FirstStride>(actualStride) || IsMatchingStride<SecondStride, ValidStrides...>(actualStride);
}

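// Returns true when ArmNN was built with CL support (ARMCOMPUTECL_ENABLED); otherwise sets
// reasonIfUnsupported and returns false. All arguments are ignored.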
template<typename ... Args>
bool IsClBackendSupported(Optional<std::string&> reasonIfUnsupported, Args... args)
{
    IgnoreUnused(reasonIfUnsupported, (args)...);
#if defined(ARMCOMPUTECL_ENABLED)
    return true;
#else
    if (reasonIfUnsupported)
    {
        reasonIfUnsupported.value() = "The armnn library has been built without CL support";
    }
    return false;
#endif
}

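// Evaluates the wrapped support expression when CL is compiled in; otherwise falls back to
// IsClBackendSupported, which reports why the backend is unavailable.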
#if defined(ARMCOMPUTECL_ENABLED)
#define FORWARD_CL_LAYER_SUPPORT_FUNC(expr) (expr)
#else
#define FORWARD_CL_LAYER_SUPPORT_FUNC(expr) IsClBackendSupported(reasonIfUnsupported)
#endif

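// Forwards a support query to the matching Compute Library validate function and converts the
// returned arm_compute::Status into a bool, copying the error description into
// reasonIfUnsupported on failure. Without CL support the macro degenerates to
// IsClBackendSupported.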
#if defined(ARMCOMPUTECL_ENABLED)
template<class FuncType, class... Args>
inline bool IsWorkloadSupported(FuncType&& func, Optional<std::string&> reasonIfUnsupported, Args&&... args)
{
    arm_compute::Status aclStatus = func(std::forward<Args>(args)...);
    const bool supported = (aclStatus.error_code() == arm_compute::ErrorCode::OK);
    if (!supported && reasonIfUnsupported)
    {
        reasonIfUnsupported.value() = aclStatus.error_description();
    }
    return supported;
}

#define FORWARD_WORKLOAD_VALIDATE_FUNC(func, reasonIfUnsupported, ...) \
    return IsWorkloadSupported(func, reasonIfUnsupported, __VA_ARGS__);
#else
#define FORWARD_WORKLOAD_VALIDATE_FUNC(func, reasonIfUnsupported, ...) \
    return IsClBackendSupported(reasonIfUnsupported, __VA_ARGS__);
#endif

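// Legacy data-type dispatch: Float16 and Float32 share floatFuncPtr, 8-bit quantized types use
// uint8FuncPtr, and the remaining data types fall through to FalseFunc and are rejected.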
template<typename FloatFunc, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeCl(Optional<std::string&> reasonIfUnsupported,
                              DataType dataType,
                              FloatFunc floatFuncPtr,
                              Uint8Func uint8FuncPtr,
                              Params&&... params)
{
    return IsClBackendSupported(reasonIfUnsupported) &&
        IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                      dataType,
                                      floatFuncPtr,
                                      floatFuncPtr,
                                      uint8FuncPtr,
                                      &FalseFunc<>,
                                      &FalseFunc<>,
                                      std::forward<Params>(params)...);
}
} // anonymous namespace

ClLayerSupport::ClLayerSupport(const IBackendInternal::IBackendSpecificModelContextPtr& modelContextPtr)
    : m_ModelContextPtr(modelContextPtr)
{
}

ClLayerSupport::ClLayerSupport()
    : m_ModelContextPtr(nullptr)
{
}

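// Single entry point used by the optimizer: dispatches on LayerType, unpacks the flat
// TensorInfo vector into per-layer arguments and forwards to the dedicated Is<Layer>Supported
// check below.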
bool ClLayerSupport::IsLayerSupported(const LayerType& type,
                                      const std::vector<TensorInfo>& infos,
                                      const BaseDescriptor& descriptor,
                                      const Optional<LstmInputParamsInfo>& lstmParamsInfo,
                                      const Optional<QuantizedLstmInputParamsInfo>& quantizedLstmParamsInfo,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    switch (type)
    {
        case LayerType::Activation:
            return IsActivationSupported(infos[0],
                                         infos[1],
                                         *(PolymorphicDowncast<const ActivationDescriptor*>(&descriptor)),
                                         reasonIfUnsupported);
        case LayerType::Addition:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsAdditionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
        case LayerType::ArgMinMax:
            return IsArgMinMaxSupported(infos[0],
                                        infos[1],
                                        *(PolymorphicDowncast<const ArgMinMaxDescriptor*>(&descriptor)),
                                        reasonIfUnsupported);
        case LayerType::BatchMatMul:
            return IsBatchMatMulSupported(infos[0],
                                          infos[1],
                                          infos[2],
                                          *(PolymorphicDowncast<const BatchMatMulDescriptor*>(&descriptor)),
                                          reasonIfUnsupported);
        case LayerType::BatchNormalization:
            return IsBatchNormalizationSupported(infos[0],
                                                 infos[1],
                                                 infos[2],
                                                 infos[3],
                                                 infos[4],
                                                 infos[5],
                                                 *(PolymorphicDowncast<const BatchNormalizationDescriptor*>
                                                     (&descriptor)),
                                                 reasonIfUnsupported);
        case LayerType::BatchToSpaceNd:
            return IsBatchToSpaceNdSupported(infos[0],
                                             infos[1],
                                             *(PolymorphicDowncast<const BatchToSpaceNdDescriptor*>(&descriptor)),
                                             reasonIfUnsupported);
        case LayerType::Cast:
            return IsCastSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::ChannelShuffle:
            return IsChannelShuffleSupported(infos[0],
                                             infos[1],
                                             *(PolymorphicDowncast<const ChannelShuffleDescriptor*>(&descriptor)),
                                             reasonIfUnsupported);
        case LayerType::Comparison:
            return IsComparisonSupported(infos[0],
                                         infos[1],
                                         infos[2],
                                         *(PolymorphicDowncast<const ComparisonDescriptor*>(&descriptor)),
                                         reasonIfUnsupported);
        case LayerType::Concat:
        {
            std::vector<const TensorInfo*> inputInfos;
            for (uint32_t i = 0; i < (infos.size() - 1); i++)
            {
                inputInfos.push_back(&infos[i]);
            }
            return IsConcatSupported(inputInfos,
                                     infos[infos.size() - 1],
                                     *(PolymorphicDowncast<const OriginsDescriptor*>(&descriptor)),
                                     reasonIfUnsupported);
        }
        case LayerType::Constant:
            return IsConstantSupported(infos[0], reasonIfUnsupported);
        case LayerType::ConvertFp16ToFp32:
            return IsConvertFp16ToFp32Supported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::ConvertFp32ToFp16:
            return IsConvertFp32ToFp16Supported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::Convolution2d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of Convolution2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = *(PolymorphicDowncast<const Convolution2dDescriptor*>(&descriptor));
            if (infos[3] == TensorInfo())
            {
                return IsConvolution2dSupported(infos[0],
                                                infos[1],
                                                desc,
                                                infos[2],
                                                EmptyOptional(),
                                                reasonIfUnsupported);
            }
            else
            {
                return IsConvolution2dSupported(infos[0],
                                                infos[1],
                                                desc,
                                                infos[2],
                                                infos[3],
                                                reasonIfUnsupported);
            }
        }
        case LayerType::Convolution3d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of Convolution3d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = *(PolymorphicDowncast<const Convolution3dDescriptor*>(&descriptor));
            if (infos[3] == TensorInfo())
            {
                return IsConvolution3dSupported(infos[0],
                                                infos[1],
                                                desc,
                                                infos[2],
                                                EmptyOptional(),
                                                reasonIfUnsupported);
            }
            else
            {
                return IsConvolution3dSupported(infos[0],
                                                infos[1],
                                                desc,
                                                infos[2],
                                                infos[3],
                                                reasonIfUnsupported);
            }
        }
        case LayerType::DepthToSpace:
            return IsDepthToSpaceSupported(infos[0],
                                           infos[1],
                                           *(PolymorphicDowncast<const DepthToSpaceDescriptor*>(&descriptor)),
                                           reasonIfUnsupported);
        case LayerType::DepthwiseConvolution2d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of DepthwiseConvolution2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = *(PolymorphicDowncast<const DepthwiseConvolution2dDescriptor*>(&descriptor));
            if (infos[3] == TensorInfo())
            {
                return IsDepthwiseConvolutionSupported(infos[0],
                                                       infos[1],
                                                       desc,
                                                       infos[2],
                                                       EmptyOptional(),
                                                       reasonIfUnsupported);
            }
            else
            {
                return IsDepthwiseConvolutionSupported(infos[0],
                                                       infos[1],
                                                       desc,
                                                       infos[2],
                                                       infos[3],
                                                       reasonIfUnsupported);
            }
        }
        case LayerType::Dequantize:
            return IsDequantizeSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::Division:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsDivisionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
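        // Each ElementwiseBinary operation is validated against the matching stand-alone
        // Compute Library workload; Power and SqDiff go through the generic
        // ClElementwiseBinaryValidate.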
        case LayerType::ElementwiseBinary:
        {
            auto desc = *(PolymorphicDowncast<const ElementwiseBinaryDescriptor *>(&descriptor));

            switch (desc.m_Operation)
            {
                case BinaryOperation::Add:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClAdditionValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2],
                                                   nullptr);
                case BinaryOperation::Div:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClDivisionWorkloadValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2],
                                                   nullptr);
                case BinaryOperation::Minimum:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMinimumWorkloadValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2]);
                case BinaryOperation::Maximum:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMaximumWorkloadValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2]);
                case BinaryOperation::Mul:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMultiplicationWorkloadValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2],
                                                   nullptr);
                case BinaryOperation::Power:
                case BinaryOperation::SqDiff:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClElementwiseBinaryValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2],
                                                   desc,
                                                   nullptr);
                case BinaryOperation::Sub:
                    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSubtractionValidate,
                                                   reasonIfUnsupported,
                                                   infos[0],
                                                   infos[1],
                                                   infos[2],
                                                   nullptr);
                default:
                    return false;
            }
        }
        case LayerType::ElementwiseUnary:
            return IsElementwiseUnarySupported(infos[0],
                                               infos[1],
                                               *(PolymorphicDowncast<const ElementwiseUnaryDescriptor*>(&descriptor)),
                                               reasonIfUnsupported);
        case LayerType::Fill:
            return IsFillSupported(infos[0],
                                   infos[1],
                                   *(PolymorphicDowncast<const FillDescriptor*>(&descriptor)),
                                   reasonIfUnsupported);
        case LayerType::Floor:
            return IsFloorSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::FullyConnected:
            return IsFullyConnectedSupported(infos[0],
                                             infos[1],
                                             infos[2],
                                             infos[3],
                                             *(PolymorphicDowncast<const FullyConnectedDescriptor*>(&descriptor)),
                                             reasonIfUnsupported);
        case LayerType::Gather:
            return IsGatherSupported(infos[0],
                                     infos[1],
                                     infos[2],
                                     *(PolymorphicDowncast<const GatherDescriptor*>(&descriptor)),
                                     reasonIfUnsupported);
        case LayerType::GatherNd:
            return IsGatherNdSupported(infos[0],
                                       infos[1],
                                       infos[2],
                                       reasonIfUnsupported);
        case LayerType::Input:
            return IsInputSupported(infos[0], reasonIfUnsupported);
        case LayerType::InstanceNormalization:
            return IsInstanceNormalizationSupported(infos[0],
                                                    infos[1],
                                                    *(PolymorphicDowncast<const InstanceNormalizationDescriptor*>
                                                        (&descriptor)),
                                                    reasonIfUnsupported);
        case LayerType::L2Normalization:
            return IsL2NormalizationSupported(infos[0],
                                              infos[1],
                                              *(PolymorphicDowncast<const L2NormalizationDescriptor*>(&descriptor)),
                                              reasonIfUnsupported);
        case LayerType::LogicalBinary:
            return IsLogicalBinarySupported(infos[0],
                                            infos[1],
                                            infos[2],
                                            *(PolymorphicDowncast<const LogicalBinaryDescriptor*>(&descriptor)),
                                            reasonIfUnsupported);
        case LayerType::LogSoftmax:
            return IsLogSoftmaxSupported(infos[0],
                                         infos[1],
                                         *(PolymorphicDowncast<const LogSoftmaxDescriptor*>(&descriptor)),
                                         reasonIfUnsupported);
        case LayerType::Lstm:
            return IsLstmSupported(infos[0],
                                   infos[1],
                                   infos[2],
                                   infos[3],
                                   infos[4],
                                   infos[5],
                                   infos[6],
                                   *(PolymorphicDowncast<const LstmDescriptor*>(&descriptor)),
                                   lstmParamsInfo.value(),
                                   reasonIfUnsupported);
        case LayerType::Map:
            return true;
        case LayerType::MemCopy:
            return LayerSupportBase::IsMemCopySupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::MemImport:
            return LayerSupportBase::IsMemImportSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::Merge:
            return LayerSupportBase::IsMergeSupported(infos[0],
                                                      infos[1],
                                                      infos[2],
                                                      reasonIfUnsupported);
        case LayerType::Maximum:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsMaximumSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
        case LayerType::Mean:
            return IsMeanSupported(infos[0],
                                   infos[1],
                                   *(PolymorphicDowncast<const MeanDescriptor*>(&descriptor)),
                                   reasonIfUnsupported);
        case LayerType::Minimum:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsMinimumSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
        case LayerType::Multiplication:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsMultiplicationSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
        case LayerType::Normalization:
            return IsNormalizationSupported(infos[0],
                                            infos[1],
                                            *(PolymorphicDowncast<const NormalizationDescriptor*>(&descriptor)),
                                            reasonIfUnsupported);
        case LayerType::Output:
            return IsOutputSupported(infos[0], reasonIfUnsupported);
        case LayerType::Pad:
            return IsPadSupported(infos[0],
                                  infos[1],
                                  *(PolymorphicDowncast<const PadDescriptor*>(&descriptor)),
                                  reasonIfUnsupported);
        case LayerType::Permute:
            return IsPermuteSupported(infos[0],
                                      infos[1],
                                      *(PolymorphicDowncast<const PermuteDescriptor*>(&descriptor)),
                                      reasonIfUnsupported);
        case LayerType::Pooling2d:
            return IsPooling2dSupported(infos[0],
                                        infos[1],
                                        *(PolymorphicDowncast<const Pooling2dDescriptor*>(&descriptor)),
                                        reasonIfUnsupported);
        case LayerType::Pooling3d:
            return IsPooling3dSupported(infos[0],
                                        infos[1],
                                        *(PolymorphicDowncast<const Pooling3dDescriptor*>(&descriptor)),
                                        reasonIfUnsupported);
        case LayerType::Prelu:
            return IsPreluSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
        case LayerType::QLstm:
            return IsQLstmSupported(infos[0],
                                    infos[1],
                                    infos[2],
                                    infos[3],
                                    infos[4],
                                    infos[5],
                                    *(PolymorphicDowncast<const QLstmDescriptor*>(&descriptor)),
                                    lstmParamsInfo.value(),
                                    reasonIfUnsupported);
        case LayerType::Quantize:
            return IsQuantizeSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::QuantizedLstm:
            return IsQuantizedLstmSupported(infos[0],
                                            infos[1],
                                            infos[2],
                                            infos[3],
                                            infos[4],
                                            quantizedLstmParamsInfo.value(),
                                            reasonIfUnsupported);
        case LayerType::Rank:
            return true;
        case LayerType::Reduce:
            return IsReduceSupported(infos[0],
                                     infos[1],
                                     *(PolymorphicDowncast<const ReduceDescriptor*>(&descriptor)),
                                     reasonIfUnsupported);
        case LayerType::Reshape:
            return IsReshapeSupported(infos[0],
                                      infos[1],
                                      *(PolymorphicDowncast<const ReshapeDescriptor*>(&descriptor)),
                                      reasonIfUnsupported);
        case LayerType::Resize:
            return IsResizeSupported(infos[0],
                                     infos[1],
                                     *(PolymorphicDowncast<const ResizeDescriptor*>(&descriptor)),
                                     reasonIfUnsupported);
        case LayerType::Shape:
            return LayerSupportBase::IsShapeSupported(infos[0],
                                                      infos[1],
                                                      reasonIfUnsupported);
        case LayerType::Slice:
            return IsSliceSupported(infos[0],
                                    infos[1],
                                    *(PolymorphicDowncast<const SliceDescriptor*>(&descriptor)),
                                    reasonIfUnsupported);
        case LayerType::Softmax:
            return IsSoftmaxSupported(infos[0],
                                      infos[1],
                                      *(PolymorphicDowncast<const SoftmaxDescriptor*>(&descriptor)),
                                      reasonIfUnsupported);
        case LayerType::SpaceToBatchNd:
            return IsSpaceToBatchNdSupported(infos[0],
                                             infos[1],
                                             *(PolymorphicDowncast<const SpaceToBatchNdDescriptor*>(&descriptor)),
                                             reasonIfUnsupported);
        case LayerType::SpaceToDepth:
            return IsSpaceToDepthSupported(infos[0],
                                           infos[1],
                                           *(PolymorphicDowncast<const SpaceToDepthDescriptor*>(&descriptor)),
                                           reasonIfUnsupported);
        case LayerType::Splitter:
        {
            std::vector<TensorInfo> outputInfos;
            for (uint32_t i = 1; i < infos.size(); i++)
            {
                outputInfos.push_back(infos[i]);
            }
            return IsSplitterSupported(infos[0],
                                       {outputInfos.begin(), outputInfos.end()},
                                       *(PolymorphicDowncast<const ViewsDescriptor*>(&descriptor)),
                                       reasonIfUnsupported);
        }
        case LayerType::Stack:
        {
            std::vector<const TensorInfo*> inputInfos;
            for (uint32_t i = 0; i < infos.size() - 1; i++)
            {
                inputInfos.push_back(&infos[i]);
            }
            return IsStackSupported(inputInfos,
                                    infos[infos.size() - 1],
                                    *(PolymorphicDowncast<const StackDescriptor*>(&descriptor)),
                                    reasonIfUnsupported);
        }
        case LayerType::StridedSlice:
            return IsStridedSliceSupported(infos[0],
                                           infos[1],
                                           *(PolymorphicDowncast<const StridedSliceDescriptor*>(&descriptor)),
                                           reasonIfUnsupported);
        case LayerType::Subtraction:
            ARMNN_NO_DEPRECATE_WARN_BEGIN
            return IsSubtractionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            ARMNN_NO_DEPRECATE_WARN_END
        case LayerType::Tile:
            return IsTileSupported(infos[0],
                                   infos[1],
                                   *(PolymorphicDowncast<const TileDescriptor*>(&descriptor)),
                                   reasonIfUnsupported);
        case LayerType::Transpose:
            return IsTransposeSupported(infos[0],
                                        infos[1],
                                        *(PolymorphicDowncast<const TransposeDescriptor*>(&descriptor)),
                                        reasonIfUnsupported);
        case LayerType::TransposeConvolution2d:
        {
            if (infos.size() != 4)
            {
                throw InvalidArgumentException("Invalid number of TransposeConvolution2d TensorInfos. "
                                               "TensorInfos should be of format: {input, output, weights, biases}.");
            }

            auto desc = *(PolymorphicDowncast<const TransposeConvolution2dDescriptor*>(&descriptor));
            if (infos[3] == TensorInfo())
            {
                return IsTransposeConvolution2dSupported(infos[0],
                                                         infos[1],
                                                         desc,
                                                         infos[2],
                                                         EmptyOptional(),
                                                         reasonIfUnsupported);
            }
            else
            {
                return IsTransposeConvolution2dSupported(infos[0],
                                                         infos[1],
                                                         desc,
                                                         infos[2],
                                                         infos[3],
                                                         reasonIfUnsupported);
            }
        }
        case LayerType::UnidirectionalSequenceLstm:
            return IsUnidirectionalSequenceLstmSupported(infos[0],
                                                         infos[1],
                                                         infos[2],
                                                         infos[3],
                                                         infos[4],
                                                         infos[5],
                                                         *(PolymorphicDowncast<const
                                                             UnidirectionalSequenceLstmDescriptor*>(&descriptor)),
                                                         lstmParamsInfo.value(),
                                                         reasonIfUnsupported);
        case LayerType::Unmap:
            return true;
        default:
            // Layers not supported in the CL backend by default:
            // debug, detectionpostprocess, fakequantization,
            // precompiled, standin, switch, fused
            return false;
    }
}

bool ClLayerSupport::IsActivationSupported(const TensorInfo& input,
                                           const TensorInfo& output,
                                           const ActivationDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClActivationWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClAdditionValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output,
                                   nullptr);
}

bool ClLayerSupport::IsArgMinMaxSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const ArgMinMaxDescriptor& descriptor,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClArgMinMaxWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsBatchMatMulSupported(const TensorInfo& inputX,
                                            const TensorInfo& inputY,
                                            const TensorInfo& output,
                                            const BatchMatMulDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClBatchMatMulValidate,
                                   reasonIfUnsupported,
                                   inputX,
                                   inputY,
                                   output,
                                   descriptor,
                                   nullptr);
}

bool ClLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   const TensorInfo& mean,
                                                   const TensorInfo& var,
                                                   const TensorInfo& beta,
                                                   const TensorInfo& gamma,
                                                   const BatchNormalizationDescriptor& descriptor,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClBatchNormalizationValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   mean,
                                   var,
                                   beta,
                                   gamma,
                                   descriptor,
                                   nullptr);
}

bool ClLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const BatchToSpaceNdDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClBatchToSpaceNdWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsCastSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClCastValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output);
}

bool ClLayerSupport::IsChannelShuffleSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const ChannelShuffleDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClChannelShuffleValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                           const TensorInfo& input1,
                                           const TensorInfo& output,
                                           const ComparisonDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClComparisonWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                       const TensorInfo& output,
                                       const OriginsDescriptor& descriptor,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    if (descriptor.GetNumDimensions() <= descriptor.GetConcatAxis())
    {
        SetValueChecked(reasonIfUnsupported, "Cl Concat: Concat axis > Number of dimensions.");
        return false;
    }

    unsigned int concatInnerAxis = (descriptor.GetNumDimensions() - descriptor.GetConcatAxis()) - 1;
    if (concatInnerAxis < 3) // Width, height, or channels
    {
        FORWARD_WORKLOAD_VALIDATE_FUNC(ClConcatWorkloadValidate,
                                       reasonIfUnsupported,
                                       inputs,
                                       output,
                                       descriptor);
    }
    else if (concatInnerAxis == 3)
    {
        // We rely on the sub-tensor optimization to handle the batch dimension for 4D tensors. If we can't use
        // sub-tensors for this then we can't support it. Here is where we check that the sub-tensors will work.
        for (auto& input : inputs)
        {
            if (input && !output.IsTypeSpaceMatch(*input)) // Cannot use sub-tensors if the types are not same space
            {
                SetValueChecked(reasonIfUnsupported, "Cl Concat: Types and quantization parameters must match.");
                return false;
            }
        }
        return true; // Sub-tensors support concat along batch
    }
    else // > 4 dimensions not supported.
    {
        SetValueChecked(reasonIfUnsupported, "Cl Concat: Maximum of 4 dimensions supported.");
        return false;
    }
}

bool ClLayerSupport::IsConstantSupported(const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClConstantWorkloadValidate,
                                   reasonIfUnsupported,
                                   output);
}

bool ClLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClConvertFp16ToFp32WorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output);
}

bool ClLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClConvertFp32ToFp16WorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output);
}

bool ClLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const Convolution2dDescriptor& descriptor,
                                              const TensorInfo& weights,
                                              const Optional<TensorInfo>& biases,
                                              Optional<std::string&> reasonIfUnsupported) const
{
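    // Fast math is opt-in: query the ClBackendModelContext (when one was supplied) for the
    // FastMathEnabled backend option before asking ACL to validate the convolution.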
Sadik Armagan045f6be2020-09-10 13:37:32 +0100888 bool isFastMathEnabled = false;
889#if defined(ARMCOMPUTECL_ENABLED)
890 if (m_ModelContextPtr)
891 {
892 if (m_ModelContextPtr.get() != nullptr)
893 {
894 auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
895 if (modelOptions)
896 {
897 isFastMathEnabled = modelOptions->IsFastMathEnabled();
898 }
899 }
900 }
901#endif
902
surmeh013537c2c2018-05-18 16:31:43 +0100903 FORWARD_WORKLOAD_VALIDATE_FUNC(ClConvolution2dWorkloadValidate,
904 reasonIfUnsupported,
905 input,
906 output,
907 descriptor,
908 weights,
Sadik Armagan045f6be2020-09-10 13:37:32 +0100909 biases,
Mike Kelly07810fc2020-11-12 10:58:48 +0000910 isFastMathEnabled,
911 nullptr);
telsoa014fcda012018-03-09 14:13:49 +0000912}
913
Teresa Charlin615ad6c2021-10-26 12:22:20 +0100914bool ClLayerSupport::IsConvolution3dSupported(const TensorInfo& input,
915 const TensorInfo& output,
916 const Convolution3dDescriptor& descriptor,
917 const TensorInfo& weights,
918 const Optional<TensorInfo>& biases,
919 Optional<std::string&> reasonIfUnsupported) const
920{
921 bool isFastMathEnabled = false;
922#if defined(ARMCOMPUTECL_ENABLED)
923 if (m_ModelContextPtr)
924{
925 if (m_ModelContextPtr.get() != nullptr)
926 {
927 auto modelOptions = dynamic_cast<ClBackendModelContext*>(m_ModelContextPtr.get());
928 if (modelOptions)
929 {
930 isFastMathEnabled = modelOptions->IsFastMathEnabled();
931 }
932 }
933}
934#endif
935
936 FORWARD_WORKLOAD_VALIDATE_FUNC(ClConvolution3dWorkloadValidate,
937 reasonIfUnsupported,
938 input,
939 output,
940 descriptor,
941 weights,
942 biases,
943 isFastMathEnabled,
944 nullptr);
945}
946
Jim Flynn983daec2019-05-29 16:20:16 +0100947bool ClLayerSupport::IsDequantizeSupported(const TensorInfo& input,
948 const TensorInfo& output,
949 Optional<std::string&> reasonIfUnsupported) const
950{
951 FORWARD_WORKLOAD_VALIDATE_FUNC(ClDequantizeWorkloadValidate,
952 reasonIfUnsupported,
953 input,
954 output);
955}
956
Aron Virginas-Tarb2801962019-09-30 11:24:53 +0100957bool ClLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
958 const TensorInfo& output,
959 const DepthToSpaceDescriptor& descriptor,
960 Optional<std::string&> reasonIfUnsupported) const
961{
962 FORWARD_WORKLOAD_VALIDATE_FUNC(ClDepthToSpaceWorkloadValidate,
963 reasonIfUnsupported,
964 input,
965 output,
966 descriptor);
967}
968
Aron Virginas-Tarbcf9f162018-10-15 11:47:37 +0100969bool ClLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
970 const TensorInfo& output,
971 const DepthwiseConvolution2dDescriptor& descriptor,
972 const TensorInfo& weights,
973 const Optional<TensorInfo>& biases,
974 Optional<std::string&> reasonIfUnsupported) const
telsoa014fcda012018-03-09 14:13:49 +0000975{
telsoa01c577f2c2018-08-31 09:22:23 +0100976 FORWARD_WORKLOAD_VALIDATE_FUNC(ClDepthwiseConvolutionWorkloadValidate,
977 reasonIfUnsupported,
978 input,
979 output,
980 descriptor,
981 weights,
Mike Kelly07810fc2020-11-12 10:58:48 +0000982 biases,
983 nullptr);
telsoa014fcda012018-03-09 14:13:49 +0000984}
985
Pablo Tellof0bd6832019-04-26 17:58:13 +0100986bool ClLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
987 const TensorInfo& output,
988 const DepthwiseConvolution2dDescriptor& descriptor,
989 const TensorInfo& weights,
990 const Optional<TensorInfo>& biases,
991 Optional<std::string&> reasonIfUnsupported) const
992{
993 FORWARD_WORKLOAD_VALIDATE_FUNC(ClDepthwiseConvolutionWorkloadValidate,
994 reasonIfUnsupported,
995 input,
996 output,
997 descriptor,
998 weights,
Mike Kelly07810fc2020-11-12 10:58:48 +0000999 biases,
1000 nullptr);
Pablo Tellof0bd6832019-04-26 17:58:13 +01001001}
1002
1003
Aron Virginas-Tarbcf9f162018-10-15 11:47:37 +01001004bool ClLayerSupport::IsDivisionSupported(const TensorInfo& input0,
1005 const TensorInfo& input1,
1006 const TensorInfo& output,
1007 Optional<std::string&> reasonIfUnsupported) const
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001008{
1009 FORWARD_WORKLOAD_VALIDATE_FUNC(ClDivisionWorkloadValidate,
1010 reasonIfUnsupported,
1011 input0,
1012 input1,
Mike Kelly07810fc2020-11-12 10:58:48 +00001013 output,
1014 nullptr);
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001015}
1016
josh minor4a3c6102020-01-06 16:40:46 -06001017bool ClLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
1018 const TensorInfo& output,
1019 const ElementwiseUnaryDescriptor& descriptor,
1020 Optional<std::string&> reasonIfUnsupported) const
1021{
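    // Each unary operation forwards to the matching Compute Library validate function;
    // anything else is reported as unsupported.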
    switch(descriptor.m_Operation)
    {
        case UnaryOperation::Abs:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClAbsWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Exp:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClExpWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Log:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClLogWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::LogicalNot:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClLogicalNotWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Neg:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClNegWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Rsqrt:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClRsqrtWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Sin:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClSinWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        case UnaryOperation::Sqrt:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClSqrtWorkloadValidate,
                                           reasonIfUnsupported,
                                           input,
                                           output);
        default:
            return false;
    }
}

bool ClLayerSupport::IsFillSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     const FillDescriptor& descriptor,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    armnn::IgnoreUnused(input);
    armnn::IgnoreUnused(output);
    armnn::IgnoreUnused(descriptor);

    return IsClBackendSupported(reasonIfUnsupported);
}

bool ClLayerSupport::IsFloorSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClFloorWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output);
}

bool ClLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const TensorInfo& weights,
                                               const TensorInfo& biases,
                                               const FullyConnectedDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClFullyConnectedWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   weights,
                                   biases,
                                   descriptor,
                                   nullptr);
}

bool ClLayerSupport::IsGatherSupported(const TensorInfo& input0,
                                       const TensorInfo& input1,
                                       const TensorInfo& output,
                                       const GatherDescriptor& descriptor,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClGatherWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsGatherNdSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClGatherNdWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output);
}

bool ClLayerSupport::IsInputSupported(const TensorInfo& input,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    return IsClBackendSupported(reasonIfUnsupported, input);
}

bool ClLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const InstanceNormalizationDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClInstanceNormalizationWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const L2NormalizationDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClL2NormalizationWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsLogicalBinarySupported(const TensorInfo& input0,
                                              const TensorInfo& input1,
                                              const TensorInfo& output,
                                              const LogicalBinaryDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(output);

    switch(descriptor.m_Operation)
    {
        case LogicalBinaryOperation::LogicalAnd:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClLogicalAndWorkloadValidate,
                                           reasonIfUnsupported,
                                           input0,
                                           input1,
                                           output);
        case LogicalBinaryOperation::LogicalOr:
            FORWARD_WORKLOAD_VALIDATE_FUNC(ClLogicalOrWorkloadValidate,
                                           reasonIfUnsupported,
                                           input0,
                                           input1,
                                           output);
        default:
            return false;
    }
}

bool ClLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
                                           const TensorInfo& output,
                                           const LogSoftmaxDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClLogSoftmaxWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsLstmSupported(const TensorInfo& input,
                                     const TensorInfo& outputStateIn,
                                     const TensorInfo& cellStateIn,
                                     const TensorInfo& scratchBuffer,
                                     const TensorInfo& outputStateOut,
                                     const TensorInfo& cellStateOut,
                                     const TensorInfo& output,
                                     const LstmDescriptor& descriptor,
                                     const LstmInputParamsInfo& paramsInfo,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClLstmFloatWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   outputStateIn,
                                   cellStateIn,
                                   scratchBuffer,
                                   outputStateOut,
                                   cellStateOut,
                                   output,
                                   descriptor,
                                   paramsInfo);
}

bool ClLayerSupport::IsMaximumSupported(const TensorInfo& input0,
                                        const TensorInfo& input1,
                                        const TensorInfo& output,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMaximumWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output);
}

bool ClLayerSupport::IsMeanSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     const MeanDescriptor& descriptor,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMeanValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsMinimumSupported(const TensorInfo& input0,
                                        const TensorInfo& input1,
                                        const TensorInfo& output,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMinimumWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output);
}

bool ClLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
                                               const TensorInfo& input1,
                                               const TensorInfo& output,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClMultiplicationWorkloadValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output,
                                   nullptr);
}

bool ClLayerSupport::IsNormalizationSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const NormalizationDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClNormalizationWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsOutputSupported(const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return IsClBackendSupported(reasonIfUnsupported, output);
}

bool ClLayerSupport::IsPadSupported(const TensorInfo& input,
                                    const TensorInfo& output,
                                    const PadDescriptor& descriptor,
                                    Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClPadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsPermuteSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        const PermuteDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClPermuteWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsPooling2dSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const Pooling2dDescriptor& descriptor,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClPooling2dWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsPooling3dSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const Pooling3dDescriptor& descriptor,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClPooling3dWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsPreluSupported(const armnn::TensorInfo& input,
                                      const armnn::TensorInfo& alpha,
                                      const armnn::TensorInfo& output,
                                      armnn::Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClPreluWorkloadValidate, reasonIfUnsupported, input, alpha, output);
}

bool ClLayerSupport::IsQLstmSupported(const TensorInfo& input,
                                      const TensorInfo& previousOutputIn,
                                      const TensorInfo& previousCellStateIn,
                                      const TensorInfo& outputStateOut,
                                      const TensorInfo& cellStateOut,
                                      const TensorInfo& output,
                                      const QLstmDescriptor& descriptor,
                                      const LstmInputParamsInfo& paramsInfo,
                                      Optional<std::string&> reasonIfUnsupported) const
{
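    // The CL QLSTM workload is only valid for the fully quantized configuration:
    // QAsymmS8 activations and outputs with QSymmS16 cell state.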
1345 if (input.GetDataType() == armnn::DataType::QAsymmS8 &&
1346 previousOutputIn.GetDataType() == armnn::DataType::QAsymmS8 &&
1347 previousCellStateIn.GetDataType() == armnn::DataType::QSymmS16 &&
1348 outputStateOut.GetDataType() == armnn::DataType::QAsymmS8 &&
1349 cellStateOut.GetDataType() == armnn::DataType::QSymmS16 &&
1350 output.GetDataType() == armnn::DataType::QAsymmS8)
1351 {
1352 FORWARD_WORKLOAD_VALIDATE_FUNC(ClQLstmWorkloadValidate,
1353 reasonIfUnsupported,
1354 input,
1355 previousCellStateIn,
1356 previousOutputIn,
1357 cellStateOut,
1358 outputStateOut,
1359 output,
1360 descriptor,
1361 paramsInfo);
1362 }
1363 else
1364 {
1365 return false;
1366 }
1367}
1368
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001369bool ClLayerSupport::IsQuantizedLstmSupported(const TensorInfo& input,
1370 const TensorInfo& previousCellStateIn,
1371 const TensorInfo& previousOutputIn,
1372 const TensorInfo& cellStateOut,
1373 const TensorInfo& output,
1374 const QuantizedLstmInputParamsInfo& paramsInfo,
1375 Optional<std::string&> reasonIfUnsupported) const
1376{
1377 FORWARD_WORKLOAD_VALIDATE_FUNC(ClQuantizedLstmWorkloadValidate,
1378 reasonIfUnsupported,
1379 input,
1380 previousCellStateIn,
1381 previousOutputIn,
1382 cellStateOut,
1383 output,
1384 paramsInfo);
1385}
1386
Sadik Armagan20ec2492019-05-31 09:09:44 +01001387bool ClLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1388 const TensorInfo& output,
1389 Optional<std::string&> reasonIfUnsupported) const
1390{
1391 FORWARD_WORKLOAD_VALIDATE_FUNC(ClQuantizeWorkloadValidate,
1392 reasonIfUnsupported,
1393 input,
1394 output);
1395}
1396
bool ClLayerSupport::IsReduceSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       const ReduceDescriptor& descriptor,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClReduceWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsReshapeSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        const ReshapeDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
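    // The descriptor only carries the target shape, which is already encoded in
    // the output TensorInfo, so it is not needed for validation.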
    IgnoreUnused(descriptor);
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClReshapeWorkloadValidate, reasonIfUnsupported, input, output);
}

bool ClLayerSupport::IsResizeSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       const ResizeDescriptor& descriptor,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClResizeWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsSliceSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      const SliceDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSliceWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        const SoftmaxDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSoftmaxWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const SpaceToBatchNdDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSpaceToBatchNdWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const SpaceToDepthDescriptor& descriptor,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSpaceToDepthWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsSplitterSupported(const TensorInfo& input,
                                         const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
                                         const ViewsDescriptor& descriptor,
                                         Optional<std::string&> reasonIfUnsupported) const
{
#if defined(ARMCOMPUTECL_ENABLED)
    // When an input with more than two dimensions is split along its last
    // dimension, sub-tensors cannot be used because their width and height
    // would not match those of the parent tensor; in that case the dedicated
    // CL splitter workload must be validated instead.
    std::set<unsigned int> splitAxis = ComputeSplitAxis(descriptor, input.GetShape());
    if (descriptor.GetNumDimensions() > 2 && splitAxis.size() == 1 &&
        *splitAxis.begin() == descriptor.GetNumDimensions() - 1)
    {
        FORWARD_WORKLOAD_VALIDATE_FUNC(ClSplitterWorkloadValidate,
                                       reasonIfUnsupported,
                                       input,
                                       outputs,
                                       *splitAxis.begin());
    }
#endif
    IgnoreUnused(descriptor);
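    // Fallback: the split is realised with sub-tensors, which requires every
    // output to match the input's type and quantization space.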
    for (auto output : outputs)
    {
        if (!input.IsTypeSpaceMatch(output)) // Cannot use sub-tensors if the types are not in the same space
        {
            SetValueChecked(reasonIfUnsupported, "Cl Splitter: Types and quantization parameters must match.");
            return false;
        }
    }
    return true;
}

bool ClLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
                                      const TensorInfo& output,
                                      const StackDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClStackWorkloadValidate,
                                   reasonIfUnsupported,
                                   inputs,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const StridedSliceDescriptor& descriptor,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClStridedSliceWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
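    // The trailing nullptr is the optional fused ActivationDescriptor; no fused
    // activation is requested when merely checking support.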
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClSubtractionValidate,
                                   reasonIfUnsupported,
                                   input0,
                                   input1,
                                   output,
                                   nullptr);
}

bool ClLayerSupport::IsTileSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     const TileDescriptor& descriptor,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClTileWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor);
}

bool ClLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const TransposeConvolution2dDescriptor& descriptor,
                                                       const TensorInfo& weights,
                                                       const Optional<TensorInfo>& biases,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClTransposeConvolution2dWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   output,
                                   descriptor,
                                   weights,
                                   biases);
}

bool ClLayerSupport::IsTransposeSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const TransposeDescriptor& descriptor,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClTransposeWorkloadValidate, reasonIfUnsupported, input, output, descriptor);
}

bool ClLayerSupport::IsUnidirectionalSequenceLstmSupported(const TensorInfo& input,
                                                           const TensorInfo& outputStateIn,
                                                           const TensorInfo& cellStateIn,
                                                           const TensorInfo& outputStateOut,
                                                           const TensorInfo& cellStateOut,
                                                           const TensorInfo& output,
                                                           const UnidirectionalSequenceLstmDescriptor& descriptor,
                                                           const LstmInputParamsInfo& paramsInfo,
                                                           Optional<std::string&> reasonIfUnsupported) const
{
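    // The CL backend implements this layer with a float workload, so support is
    // determined by the float workload's validator.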
    FORWARD_WORKLOAD_VALIDATE_FUNC(ClUnidirectionalSequenceLstmFloatWorkloadValidate,
                                   reasonIfUnsupported,
                                   input,
                                   outputStateIn,
                                   cellStateIn,
                                   outputStateOut,
                                   cellStateOut,
                                   output,
                                   descriptor,
                                   paramsInfo);
}

} // namespace armnn