blob: 2d70d7add029617f2cfd2f387da3dfdf1ade6bf4 [file] [log] [blame]
Ferran Balaguerf54c9462019-07-10 12:43:58 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
6#include <armnn/BackendHelper.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +00007#include <armnn/BackendRegistry.hpp>
Matthew Sloyan81beae32021-07-13 19:46:11 +01008#include <armnn/Logging.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +00009
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000010#include <armnn/backends/IBackendInternal.hpp>
Ferran Balaguerf54c9462019-07-10 12:43:58 +010011
12namespace armnn
13{
14
Francis Murtagh7909c532021-01-28 14:25:15 +000015// Return LayerSupportHandle instead of the previous pointer to ILayerSupport.
16LayerSupportHandle GetILayerSupportByBackendId(const armnn::BackendId& backend)
Ferran Balaguerf54c9462019-07-10 12:43:58 +010017{
18 BackendRegistry& backendRegistry = armnn::BackendRegistryInstance();
19
20 if (!backendRegistry.IsBackendRegistered(backend))
21 {
Francis Murtagh7909c532021-01-28 14:25:15 +000022 return LayerSupportHandle(nullptr);
Ferran Balaguerf54c9462019-07-10 12:43:58 +010023 }
24
25 auto factoryFunc = backendRegistry.GetFactory(backend);
26 auto backendObject = factoryFunc();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000027 return LayerSupportHandle(backendObject->GetLayerSupport(), backend);
28}
29
Finn Williamsb9af86e2021-05-26 18:38:12 +010030Optional<const BackendOptions::BackendOption> GetCapability(const std::string& backendCapabilityName,
31 const BackendCapabilities& capabilities)
32{
33 for (size_t i=0; i < capabilities.GetOptionCount(); i++)
34 {
35 const auto& capability = capabilities.GetOption(i);
36 if (backendCapabilityName == capability.GetName())
37 {
38 return capability;
39 }
40 }
41 return EmptyOptional();
42}
43
44Optional<const BackendOptions::BackendOption> GetCapability(const std::string& backendCapabilityName,
45 const armnn::BackendId& backend)
46{
47 auto const& backendRegistry = armnn::BackendRegistryInstance();
48 if (backendRegistry.IsBackendRegistered(backend))
49 {
50 auto factoryFunc = backendRegistry.GetFactory(backend);
51 auto backendObject = factoryFunc();
52 auto capabilities = backendObject->GetCapabilities();
53 return GetCapability(backendCapabilityName, capabilities);
54 }
55 return EmptyOptional();
56}
57
58bool HasCapability(const std::string& name, const BackendCapabilities& capabilities)
59{
60 return GetCapability(name, capabilities).has_value();
61}
62
63bool HasCapability(const std::string& name, const armnn::BackendId& backend)
64{
65 return GetCapability(name, backend).has_value();
66}
67
/// Returns true only if 'capabilities' contains an option matching both the
/// NAME and the VALUE of 'capability'. Values are compared only when both
/// sides hold the same underlying type (bool/float/int/string/unsigned int);
/// on a name match with mismatched types no branch fires and the loop
/// continues, so a later option with the same name may still be compared.
bool HasCapability(const BackendOptions::BackendOption& capability, const BackendCapabilities& capabilities)
{
    for (size_t i=0; i < capabilities.GetOptionCount(); i++)
    {
        const auto& backendCapability = capabilities.GetOption(i);
        if (capability.GetName() == backendCapability.GetName())
        {
            // Dispatch on the variant's runtime type; each branch returns the
            // equality of the two values immediately.
            if (capability.GetValue().IsBool() && backendCapability.GetValue().IsBool())
            {
                return capability.GetValue().AsBool() == backendCapability.GetValue().AsBool();
            }
            else if(capability.GetValue().IsFloat() && backendCapability.GetValue().IsFloat())
            {
                return capability.GetValue().AsFloat() == backendCapability.GetValue().AsFloat();
            }
            else if(capability.GetValue().IsInt() && backendCapability.GetValue().IsInt())
            {
                return capability.GetValue().AsInt() == backendCapability.GetValue().AsInt();
            }
            else if(capability.GetValue().IsString() && backendCapability.GetValue().IsString())
            {
                return capability.GetValue().AsString() == backendCapability.GetValue().AsString();
            }
            else if(capability.GetValue().IsUnsignedInt() && backendCapability.GetValue().IsUnsignedInt())
            {
                return capability.GetValue().AsUnsignedInt() == backendCapability.GetValue().AsUnsignedInt();
            }
        }
    }
    // No option matched by name (or by name-and-type) with an equal value.
    return false;
}
99
100bool HasCapability(const BackendOptions::BackendOption& backendOption, const armnn::BackendId& backend)
101{
102 auto const& backendRegistry = armnn::BackendRegistryInstance();
103 if (backendRegistry.IsBackendRegistered(backend))
104 {
105 auto factoryFunc = backendRegistry.GetFactory(backend);
106 auto backendObject = factoryFunc();
107 auto capabilities = backendObject->GetCapabilities();
108 return HasCapability(backendOption, capabilities);
109 }
110 return false;
111}
112
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000113/// Convenience function to check a capability on a backend
114bool IsCapabilitySupported(const armnn::BackendId& backend, armnn::BackendCapability capability)
115{
116 bool hasCapability = false;
117 auto const& backendRegistry = armnn::BackendRegistryInstance();
118 if (backendRegistry.IsBackendRegistered(backend))
119 {
120 auto factoryFunc = backendRegistry.GetFactory(backend);
121 auto backendObject = factoryFunc();
Finn Williamsb9af86e2021-05-26 18:38:12 +0100122 ARMNN_NO_DEPRECATE_WARN_BEGIN
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000123 hasCapability = backendObject->HasCapability(capability);
Finn Williamsb9af86e2021-05-26 18:38:12 +0100124 ARMNN_NO_DEPRECATE_WARN_END
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000125 }
126 return hasCapability;
Ferran Balaguerf54c9462019-07-10 12:43:58 +0100127}
128
Sadik Armaganb7851f92021-10-06 16:37:02 +0100129unsigned int GetNumberOfCacheFiles(const armnn::BackendId& backend)
130{
131 auto const& backendRegistry = armnn::BackendRegistryInstance();
132 if (backendRegistry.IsBackendRegistered(backend))
133 {
134 auto factoryFunc = backendRegistry.GetFactory(backend);
135 auto backendObject = factoryFunc();
136 return backendObject->GetNumberOfCacheFiles();
137 }
138 return 0;
139}
140
Francis Murtagh7909c532021-01-28 14:25:15 +0000141bool LayerSupportHandle::IsBackendRegistered() const
142{
143 if (m_LayerSupport)
144 {
145 return true;
146 }
147
148 return false;
Ferran Balaguerf54c9462019-07-10 12:43:58 +0100149}
Francis Murtagh7909c532021-01-28 14:25:15 +0000150
// Shorthand for the vector of TensorInfos handed to ILayerSupport::IsLayerSupported.
using TensorInfos = std::vector<TensorInfo>;
152
Francis Murtagh7909c532021-01-28 14:25:15 +0000153bool LayerSupportHandle::IsActivationSupported(const TensorInfo& input,
154 const TensorInfo& output,
155 const ActivationDescriptor& descriptor,
156 Optional<std::string&> reasonIfUnsupported)
157{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000158 TensorInfos infos{input, output};
159
160 return m_LayerSupport->IsLayerSupported(LayerType::Activation,
161 infos,
162 descriptor,
163 EmptyOptional(),
164 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000165 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000166}
167
168bool LayerSupportHandle::IsAdditionSupported(const TensorInfo& input0,
169 const TensorInfo& input1,
170 const TensorInfo& output,
171 Optional<std::string&> reasonIfUnsupported)
172{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000173 TensorInfos infos{input0, input1, output};
174
175 return m_LayerSupport->IsLayerSupported(LayerType::Addition,
176 infos,
177 BaseDescriptor(),
178 EmptyOptional(),
179 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000180 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000181}
182
183bool LayerSupportHandle::IsArgMinMaxSupported(const TensorInfo& input,
184 const TensorInfo& output,
185 const ArgMinMaxDescriptor& descriptor,
186 Optional<std::string&> reasonIfUnsupported)
187{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000188 TensorInfos infos{input, output};
189
190 return m_LayerSupport->IsLayerSupported(LayerType::ArgMinMax,
191 infos,
192 descriptor,
193 EmptyOptional(),
194 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000195 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000196}
197
198bool LayerSupportHandle::IsBatchNormalizationSupported(const TensorInfo& input,
199 const TensorInfo& output,
200 const TensorInfo& mean,
201 const TensorInfo& var,
202 const TensorInfo& beta,
203 const TensorInfo& gamma,
204 const BatchNormalizationDescriptor& descriptor,
205 Optional<std::string&> reasonIfUnsupported)
206{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000207 TensorInfos infos{input, output, mean, var, beta, gamma};
208
209 return m_LayerSupport->IsLayerSupported(LayerType::BatchNormalization,
210 infos,
211 descriptor,
212 EmptyOptional(),
213 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000214 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000215}
216
217bool LayerSupportHandle::IsBatchToSpaceNdSupported(const TensorInfo& input,
218 const TensorInfo& output,
219 const BatchToSpaceNdDescriptor& descriptor,
220 Optional<std::string&> reasonIfUnsupported)
221{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000222 TensorInfos infos{input, output};
223
224 return m_LayerSupport->IsLayerSupported(LayerType::BatchToSpaceNd,
225 infos,
226 descriptor,
227 EmptyOptional(),
228 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000229 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000230}
231
mathad01b392e982021-04-07 12:07:30 +0100232bool LayerSupportHandle::IsCastSupported(const TensorInfo& input,
233 const TensorInfo& output,
234 Optional<std::string&> reasonIfUnsupported)
235{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000236 TensorInfos infos{input, output};
237
238 return m_LayerSupport->IsLayerSupported(LayerType::Cast,
239 infos,
240 BaseDescriptor(),
241 EmptyOptional(),
242 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000243 reasonIfUnsupported);
mathad01b392e982021-04-07 12:07:30 +0100244}
245
Cathal Corbett34b429c2021-12-24 12:24:40 +0000246bool LayerSupportHandle::IsChannelShuffleSupported(const TensorInfo &input,
247 const TensorInfo &output,
Simon Obute51f67772021-09-03 15:50:13 +0100248 const ChannelShuffleDescriptor &descriptor,
249 Optional<std::string &> reasonIfUnsupported)
250{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000251 TensorInfos infos{input, output};
252
253 return m_LayerSupport->IsLayerSupported(LayerType::ChannelShuffle,
254 infos,
255 descriptor,
256 EmptyOptional(),
257 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000258 reasonIfUnsupported);
Simon Obute51f67772021-09-03 15:50:13 +0100259}
260
Francis Murtagh7909c532021-01-28 14:25:15 +0000261bool LayerSupportHandle::IsComparisonSupported(const TensorInfo& input0,
262 const TensorInfo& input1,
263 const TensorInfo& output,
264 const ComparisonDescriptor& descriptor,
265 Optional<std::string&> reasonIfUnsupported)
266{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000267 TensorInfos infos{input0, input1, output};
268
269 return m_LayerSupport->IsLayerSupported(LayerType::Comparison,
270 infos,
271 descriptor,
272 EmptyOptional(),
273 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000274 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000275}
276
277bool LayerSupportHandle::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
278 const TensorInfo& output,
279 const OriginsDescriptor& descriptor,
280 Optional<std::string&> reasonIfUnsupported)
281{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000282 TensorInfos infos;
283 for (const TensorInfo* inputInfo : inputs)
284 {
285 infos.push_back(*inputInfo);
286 }
287 infos.push_back(output);
288
289 return m_LayerSupport->IsLayerSupported(LayerType::Concat,
290 infos,
291 descriptor,
292 EmptyOptional(),
293 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000294 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000295}
296
297bool LayerSupportHandle::IsConstantSupported(const TensorInfo& output,
298 Optional<std::string&> reasonIfUnsupported)
299{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000300 TensorInfos infos{output};
301
302 return m_LayerSupport->IsLayerSupported(LayerType::Constant,
303 infos,
304 BaseDescriptor(),
305 EmptyOptional(),
306 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000307 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000308}
309
310bool LayerSupportHandle::IsConvertBf16ToFp32Supported(const TensorInfo& input,
311 const TensorInfo& output,
312 Optional<std::string&> reasonIfUnsupported)
313{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000314 TensorInfos infos{input, output};
315
316 return m_LayerSupport->IsLayerSupported(LayerType::ConvertBf16ToFp32,
317 infos,
318 BaseDescriptor(),
319 EmptyOptional(),
320 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000321 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000322}
323
324bool LayerSupportHandle::IsConvertFp32ToBf16Supported(const TensorInfo& input,
325 const TensorInfo& output,
326 Optional<std::string&> reasonIfUnsupported)
327{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000328 TensorInfos infos{input, output};
329
330 return m_LayerSupport->IsLayerSupported(LayerType::ConvertFp32ToBf16,
331 infos,
332 BaseDescriptor(),
333 EmptyOptional(),
334 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000335 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000336}
337
338bool LayerSupportHandle::IsConvertFp16ToFp32Supported(const TensorInfo& input,
339 const TensorInfo& output,
340 Optional<std::string&> reasonIfUnsupported)
341{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000342 TensorInfos infos{input, output};
343
344 return m_LayerSupport->IsLayerSupported(LayerType::ConvertFp16ToFp32,
345 infos,
346 BaseDescriptor(),
347 EmptyOptional(),
348 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000349 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000350}
351
352bool LayerSupportHandle::IsConvertFp32ToFp16Supported(const TensorInfo& input,
353 const TensorInfo& output,
354 Optional<std::string&> reasonIfUnsupported)
355{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000356 TensorInfos infos{input, output};
357
358 return m_LayerSupport->IsLayerSupported(LayerType::ConvertFp32ToFp16,
359 infos,
360 BaseDescriptor(),
361 EmptyOptional(),
362 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000363 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000364}
365
366bool LayerSupportHandle::IsConvolution2dSupported(const TensorInfo& input,
367 const TensorInfo& output,
368 const Convolution2dDescriptor& descriptor,
369 const TensorInfo& weights,
370 const Optional<TensorInfo>& biases,
371 Optional<std::string&> reasonIfUnsupported)
372{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000373 TensorInfo biasesVal = biases.has_value() ? biases.value() : TensorInfo();
374 TensorInfos infos{input, output, weights, biasesVal};
375
Keith Davis2cddc722022-04-07 11:32:00 +0100376 Optional<const BackendOptions::BackendOption> capability ;
377 if(!m_BackendId.IsUndefined())
378 {
379 capability = GetCapability("ConstantTensorsAsInputs", m_BackendId);
380 if(!capability.has_value() || capability.value().GetValue().AsBool() == false)
381 {
382 if(!weights.IsConstant())
383 {
384 return false;
385 }
386 if (descriptor.m_BiasEnabled && !biases.has_value())
387 {
388 return false;
389 }
390
391
392 // At the first stage we will only print a warning. this is to give
393 // backend developers a chance to adopt and read weights from input slots.
394 ARMNN_LOG(warning) << "The backend makes use of a deprecated interface to read constant tensors. "
395 "If you are a backend developer please find more information in our "
396 "doxygen documentation on github https://github.com/ARM-software/armnn "
397 "under the keyword 'ConstTensorsAsInputs'.";
398 }
399 }
400
Cathal Corbett34b429c2021-12-24 12:24:40 +0000401 return m_LayerSupport->IsLayerSupported(LayerType::Convolution2d,
402 infos,
403 descriptor,
404 EmptyOptional(),
405 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000406 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000407}
408
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100409bool LayerSupportHandle::IsConvolution3dSupported(const TensorInfo& input,
410 const TensorInfo& output,
411 const Convolution3dDescriptor& descriptor,
412 const TensorInfo& weights,
413 const Optional<TensorInfo>& biases,
414 Optional<std::string&> reasonIfUnsupported)
415{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000416 TensorInfo biasesVal = biases.has_value() ? biases.value() : TensorInfo();
417 TensorInfos infos{input, output, weights, biasesVal};
418
419 return m_LayerSupport->IsLayerSupported(LayerType::Convolution3d,
420 infos,
421 descriptor,
422 EmptyOptional(),
423 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000424 reasonIfUnsupported);
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100425}
426
Francis Murtagh7909c532021-01-28 14:25:15 +0000427bool LayerSupportHandle::IsDebugSupported(const TensorInfo& input,
428 const TensorInfo& output,
429 Optional<std::string&> reasonIfUnsupported)
430{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000431 TensorInfos infos{input, output};
432
433 return m_LayerSupport->IsLayerSupported(LayerType::Debug,
434 infos,
435 BaseDescriptor(),
436 EmptyOptional(),
437 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000438 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000439}
440
441bool LayerSupportHandle::IsDepthToSpaceSupported(const TensorInfo& input,
442 const TensorInfo& output,
443 const DepthToSpaceDescriptor& descriptor,
444 Optional<std::string&> reasonIfUnsupported)
445{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000446 TensorInfos infos{input, output};
447
448 return m_LayerSupport->IsLayerSupported(LayerType::DepthToSpace,
449 infos,
450 descriptor,
451 EmptyOptional(),
452 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000453 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000454}
455
456bool LayerSupportHandle::IsDepthwiseConvolutionSupported(
457 const TensorInfo& input,
458 const TensorInfo& output,
459 const DepthwiseConvolution2dDescriptor& descriptor,
460 const TensorInfo& weights,
461 const Optional<TensorInfo>& biases,
462 Optional<std::string&> reasonIfUnsupported)
463{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000464 TensorInfo biasesVal = biases.has_value() ? biases.value() : TensorInfo();
465 TensorInfos infos{input, output, weights, biasesVal};
466
Cathal Corbett06902652022-04-14 17:55:11 +0100467 Optional<const BackendOptions::BackendOption> capability ;
468 if(!m_BackendId.IsUndefined())
469 {
470 capability = GetCapability("ConstantTensorsAsInputs", m_BackendId);
471 if(!capability.has_value() || capability.value().GetValue().AsBool() == false)
472 {
473 if(!weights.IsConstant())
474 {
475 return false;
476 }
477 if(descriptor.m_BiasEnabled)
478 {
479 if(!biases.value().IsConstant())
480 {
481 return false;
482 }
483 }
484 // At the first stage we will only print a warning. this is to give
485 // backend developers a chance to adopt and read weights from input slots.
486 ARMNN_LOG(warning) << "The backend makes use of a deprecated interface to read constant tensors. "
487 "If you are a backend developer please find more information in our "
488 "doxygen documentation on github https://github.com/ARM-software/armnn "
489 "under the keyword 'ConstTensorsAsInputs'.";
490 }
491 }
492
Cathal Corbett34b429c2021-12-24 12:24:40 +0000493 return m_LayerSupport->IsLayerSupported(LayerType::DepthwiseConvolution2d,
494 infos,
495 descriptor,
496 EmptyOptional(),
497 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000498 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000499}
500
501bool LayerSupportHandle::IsDequantizeSupported(const TensorInfo& input,
502 const TensorInfo& output,
503 Optional<std::string&> reasonIfUnsupported)
504{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000505 TensorInfos infos{input, output};
506
507 return m_LayerSupport->IsLayerSupported(LayerType::Dequantize,
508 infos,
509 BaseDescriptor(),
510 EmptyOptional(),
511 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000512 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000513}
514
515bool LayerSupportHandle::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
516 const TensorInfo& scores,
517 const TensorInfo& anchors,
518 const TensorInfo& detectionBoxes,
519 const TensorInfo& detectionClasses,
520 const TensorInfo& detectionScores,
521 const TensorInfo& numDetections,
522 const DetectionPostProcessDescriptor& descriptor,
523 Optional<std::string&> reasonIfUnsupported)
524{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000525 TensorInfos infos{boxEncodings, scores, anchors, detectionBoxes, detectionClasses, detectionScores, numDetections};
526
527 return m_LayerSupport->IsLayerSupported(LayerType::DetectionPostProcess,
528 infos,
529 descriptor,
530 EmptyOptional(),
531 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000532 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000533}
534
535bool LayerSupportHandle::IsDilatedDepthwiseConvolutionSupported(
536 const TensorInfo& input,
537 const TensorInfo& output,
538 const DepthwiseConvolution2dDescriptor& descriptor,
539 const TensorInfo& weights,
540 const Optional<TensorInfo>& biases,
541 Optional<std::string&> reasonIfUnsupported)
542{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000543 TensorInfo biasesVal = biases.has_value() ? biases.value() : TensorInfo();
544 TensorInfos infos{input, output, weights, biasesVal};
545
Cathal Corbett06902652022-04-14 17:55:11 +0100546 Optional<const BackendOptions::BackendOption> capability ;
547 if(!m_BackendId.IsUndefined())
548 {
549 capability = GetCapability("ConstantTensorsAsInputs", m_BackendId);
550 if(!capability.has_value() || capability.value().GetValue().AsBool() == false)
551 {
552 if(!weights.IsConstant())
553 {
554 return false;
555 }
556 if(descriptor.m_BiasEnabled)
557 {
558 if(!biases.value().IsConstant())
559 {
560 return false;
561 }
562 }
563 // At the first stage we will only print a warning. this is to give
564 // backend developers a chance to adopt and read weights from input slots.
565 ARMNN_LOG(warning) << "The backend makes use of a deprecated interface to read constant tensors. "
566 "If you are a backend developer please find more information in our "
567 "doxygen documentation on github https://github.com/ARM-software/armnn "
568 "under the keyword 'ConstTensorsAsInputs'.";
569 }
570 }
571
Cathal Corbett34b429c2021-12-24 12:24:40 +0000572 return m_LayerSupport->IsLayerSupported(LayerType::DepthwiseConvolution2d,
573 infos,
574 descriptor,
575 EmptyOptional(),
576 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000577 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000578}
579
580bool LayerSupportHandle::IsDivisionSupported(const TensorInfo& input0,
581 const TensorInfo& input1,
582 const TensorInfo& output,
583 Optional<std::string&> reasonIfUnsupported)
584{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000585 TensorInfos infos{input0, input1, output};
586
587 return m_LayerSupport->IsLayerSupported(LayerType::Division,
588 infos,
589 BaseDescriptor(),
590 EmptyOptional(),
591 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000592 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000593}
594
595bool LayerSupportHandle::IsElementwiseUnarySupported(const TensorInfo& input,
596 const TensorInfo& output,
597 const ElementwiseUnaryDescriptor& descriptor,
598 Optional<std::string&> reasonIfUnsupported)
599{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000600 TensorInfos infos{input, output};
601
602 return m_LayerSupport->IsLayerSupported(LayerType::ElementwiseUnary,
603 infos,
604 descriptor,
605 EmptyOptional(),
606 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000607 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000608}
609
Francis Murtagh7909c532021-01-28 14:25:15 +0000610bool LayerSupportHandle::IsFakeQuantizationSupported(const TensorInfo& input,
611 const FakeQuantizationDescriptor& descriptor,
612 Optional<std::string&> reasonIfUnsupported)
613{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000614 TensorInfos infos{input};
615
616 return m_LayerSupport->IsLayerSupported(LayerType::FakeQuantization,
617 infos,
618 descriptor,
619 EmptyOptional(),
620 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000621 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000622}
623
624bool LayerSupportHandle::IsFillSupported(const TensorInfo& input,
625 const TensorInfo& output,
626 const FillDescriptor& descriptor,
627 Optional<std::string&> reasonIfUnsupported)
628{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000629 TensorInfos infos{input, output};
630
631 return m_LayerSupport->IsLayerSupported(LayerType::Fill,
632 infos,
633 descriptor,
634 EmptyOptional(),
635 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000636 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000637}
638
639bool LayerSupportHandle::IsFloorSupported(const TensorInfo& input,
640 const TensorInfo& output,
641 Optional<std::string&> reasonIfUnsupported)
642{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000643 TensorInfos infos{input, output};
644
645 return m_LayerSupport->IsLayerSupported(LayerType::Floor,
646 infos,
647 BaseDescriptor(),
648 EmptyOptional(),
649 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000650 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000651}
652
/// Query backend support for a FullyConnected layer.
/// Two capability gates run before the backend query when a backend id is set:
///  1. Without "ConstantTensorsAsInputs", weights (and bias when enabled)
///     must be constant; a reason string is written and false returned
///     otherwise, and a deprecation warning is logged on the legacy path.
///  2. With non-constant weights requested in the descriptor, support is
///     decided solely by the "NonConstWeights" capability.
bool LayerSupportHandle::IsFullyConnectedSupported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   const TensorInfo& weights,
                                                   const TensorInfo& biases,
                                                   const FullyConnectedDescriptor& descriptor,
                                                   Optional<std::string&> reasonIfUnsupported)
{
    if(!m_BackendId.IsUndefined())
    {
        auto capability = GetCapability("ConstantTensorsAsInputs", m_BackendId);
        if(!capability.has_value() || capability.value().GetValue().AsBool() == false)
        {
            // Legacy path: constant tensors must be flagged via TensorInfo::IsConstant.
            if(!weights.IsConstant())
            {
                if (reasonIfUnsupported.has_value())
                {
                    reasonIfUnsupported.value() =
                        "This backend might not support non constant weights. "
                        "If weights are constant make sure to set IsConstant when creating TensorInfo";
                }

                return false;
            }
            if(descriptor.m_BiasEnabled)
            {
                if(!biases.IsConstant())
                {
                    if (reasonIfUnsupported.has_value())
                    {
                        reasonIfUnsupported.value() =
                            "This backend might not support non constant bias. "
                            "If bias are constant make sure to set IsConstant when creating TensorInfo";
                    }
                    return false;
                }
            }

            // At the first stage we will only print a warning. this is to give
            // backend developers a chance to adopt and read weights from input slots.
            ARMNN_LOG(warning) << "The backend makes use of a deprecated interface to read constant tensors. "
                                  "If you are a backend developer please find more information in our "
                                  "doxygen documentation on github https://github.com/ARM-software/armnn "
                                  "under the keyword 'ConstTensorsAsInputs'.";
        }

        // Non-constant weights requested: decided entirely by the
        // "NonConstWeights" capability — the backend query below is skipped.
        if(!descriptor.m_ConstantWeights)
        {
            capability = GetCapability("NonConstWeights", m_BackendId);
            if (capability.has_value() && capability.value().GetValue().AsBool() == true)
            {
                return true;
            }
            return false;
        }
    }

    TensorInfos infos{input, output, weights, biases};

    return m_LayerSupport->IsLayerSupported(LayerType::FullyConnected,
                                            infos,
                                            descriptor,
                                            EmptyOptional(),
                                            EmptyOptional(),
                                            reasonIfUnsupported);
}
718
719bool LayerSupportHandle::IsGatherSupported(const TensorInfo& input0,
720 const TensorInfo& input1,
721 const TensorInfo& output,
Francis Murtagh7909c532021-01-28 14:25:15 +0000722 const GatherDescriptor& descriptor,
723 Optional<std::string&> reasonIfUnsupported)
724{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000725 TensorInfos infos{input0, input1, output};
726
727 return m_LayerSupport->IsLayerSupported(LayerType::Gather,
728 infos,
729 descriptor,
730 EmptyOptional(),
731 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000732 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000733}
734
Teresa Charlinb2d3ec52022-04-12 22:07:09 +0100735bool LayerSupportHandle::IsGatherNdSupported(const TensorInfo& input0,
736 const TensorInfo& input1,
737 const TensorInfo& output,
738 Optional<std::string&> reasonIfUnsupported)
739{
740 TensorInfos infos{input0, input1, output};
741
742 return m_LayerSupport->IsLayerSupported(LayerType::GatherNd,
743 infos,
744 BaseDescriptor(),
745 EmptyOptional(),
746 EmptyOptional(),
747 reasonIfUnsupported);
748}
749
Francis Murtagh7909c532021-01-28 14:25:15 +0000750bool LayerSupportHandle::IsInputSupported(const TensorInfo& input,
751 Optional<std::string&> reasonIfUnsupported)
752{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000753 TensorInfos infos{input};
754
755 return m_LayerSupport->IsLayerSupported(LayerType::Input,
756 infos,
757 BaseDescriptor(),
758 EmptyOptional(),
759 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000760 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000761}
762
763bool LayerSupportHandle::IsInstanceNormalizationSupported(
764 const TensorInfo& input,
765 const TensorInfo& output,
766 const InstanceNormalizationDescriptor& descriptor,
767 Optional<std::string&> reasonIfUnsupported)
768{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000769 TensorInfos infos{input, output};
770
771 return m_LayerSupport->IsLayerSupported(LayerType::InstanceNormalization,
772 infos,
773 descriptor,
774 EmptyOptional(),
775 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000776 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000777}
778
779bool LayerSupportHandle::IsL2NormalizationSupported(const TensorInfo& input,
780 const TensorInfo& output,
781 const L2NormalizationDescriptor& descriptor,
782 Optional<std::string&> reasonIfUnsupported)
783{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000784 TensorInfos infos{input, output};
785
786 return m_LayerSupport->IsLayerSupported(LayerType::L2Normalization,
787 infos,
788 descriptor,
789 EmptyOptional(),
790 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000791 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000792}
793
794bool LayerSupportHandle::IsLogicalBinarySupported(const TensorInfo& input0,
795 const TensorInfo& input1,
796 const TensorInfo& output,
797 const LogicalBinaryDescriptor& descriptor,
798 Optional<std::string&> reasonIfUnsupported)
799{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000800 TensorInfos infos{input0, input1, output};
801
802 return m_LayerSupport->IsLayerSupported(LayerType::LogicalBinary,
803 infos,
804 descriptor,
805 EmptyOptional(),
806 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000807 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000808}
809
810bool LayerSupportHandle::IsLogicalUnarySupported(const TensorInfo& input,
811 const TensorInfo& output,
812 const ElementwiseUnaryDescriptor& descriptor,
813 Optional<std::string&> reasonIfUnsupported)
814{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000815 TensorInfos infos{input, output};
816
817 return m_LayerSupport->IsLayerSupported(LayerType::ElementwiseUnary,
818 infos,
819 descriptor,
820 EmptyOptional(),
821 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000822 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000823}
824
825bool LayerSupportHandle::IsLogSoftmaxSupported(const TensorInfo& input,
826 const TensorInfo& output,
827 const LogSoftmaxDescriptor& descriptor,
828 Optional<std::string&> reasonIfUnsupported)
829{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000830 TensorInfos infos{input, output};
831
832 return m_LayerSupport->IsLayerSupported(LayerType::LogSoftmax,
833 infos,
834 descriptor,
835 EmptyOptional(),
836 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000837 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000838}
839
840bool LayerSupportHandle::IsLstmSupported(const TensorInfo& input,
841 const TensorInfo& outputStateIn,
842 const TensorInfo& cellStateIn,
843 const TensorInfo& scratchBuffer,
844 const TensorInfo& outputStateOut,
845 const TensorInfo& cellStateOut,
846 const TensorInfo& output,
847 const LstmDescriptor& descriptor,
848 const LstmInputParamsInfo& paramsInfo,
849 Optional<std::string&> reasonIfUnsupported)
850{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000851 TensorInfos infos{input, outputStateIn, cellStateIn, scratchBuffer, outputStateOut, cellStateOut, output};
852
853 return m_LayerSupport->IsLayerSupported(LayerType::Lstm,
854 infos,
855 descriptor,
856 paramsInfo,
857 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000858 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000859}
860
861bool LayerSupportHandle::IsMaximumSupported(const TensorInfo& input0,
862 const TensorInfo& input1,
863 const TensorInfo& output,
864 Optional<std::string&> reasonIfUnsupported)
865{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000866 TensorInfos infos{input0, input1, output};
867
868 return m_LayerSupport->IsLayerSupported(LayerType::Maximum,
869 infos,
870 BaseDescriptor(),
871 EmptyOptional(),
872 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000873 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000874}
875
876bool LayerSupportHandle::IsMeanSupported(const TensorInfo& input,
877 const TensorInfo& output,
878 const MeanDescriptor& descriptor,
879 Optional<std::string&> reasonIfUnsupported)
880{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000881 TensorInfos infos{input, output};
882
883 return m_LayerSupport->IsLayerSupported(LayerType::Mean,
884 infos,
885 descriptor,
886 EmptyOptional(),
887 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000888 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000889}
890
891bool LayerSupportHandle::IsMemCopySupported(const TensorInfo& input,
892 const TensorInfo& output,
893 Optional<std::string&> reasonIfUnsupported)
894{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000895 TensorInfos infos{input, output};
896
897 return m_LayerSupport->IsLayerSupported(LayerType::MemCopy,
898 infos,
899 BaseDescriptor(),
900 EmptyOptional(),
901 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000902 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000903}
904
905bool LayerSupportHandle::IsMemImportSupported(const TensorInfo& input,
906 const TensorInfo& output,
907 Optional<std::string&> reasonIfUnsupported)
908{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000909 TensorInfos infos{input, output};
910
911 return m_LayerSupport->IsLayerSupported(LayerType::MemImport,
912 infos,
913 BaseDescriptor(),
914 EmptyOptional(),
915 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000916 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000917}
918
919bool LayerSupportHandle::IsMergeSupported(const TensorInfo& input0,
920 const TensorInfo& input1,
921 const TensorInfo& output,
922 Optional<std::string&> reasonIfUnsupported)
923{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000924 TensorInfos infos{input0, input1, output};
925
926 return m_LayerSupport->IsLayerSupported(LayerType::Merge,
927 infos,
928 BaseDescriptor(),
929 EmptyOptional(),
930 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000931 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000932}
933
Francis Murtagh7909c532021-01-28 14:25:15 +0000934bool LayerSupportHandle::IsMinimumSupported(const TensorInfo& input0,
935 const TensorInfo& input1,
936 const TensorInfo& output,
937 Optional<std::string&> reasonIfUnsupported)
938{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000939 TensorInfos infos{input0, input1, output};
940
941 return m_LayerSupport->IsLayerSupported(LayerType::Minimum,
942 infos,
943 BaseDescriptor(),
944 EmptyOptional(),
945 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000946 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000947}
948
949bool LayerSupportHandle::IsMultiplicationSupported(const TensorInfo& input0,
950 const TensorInfo& input1,
951 const TensorInfo& output,
952 Optional<std::string&> reasonIfUnsupported)
953{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000954 TensorInfos infos{input0, input1, output};
955
956 return m_LayerSupport->IsLayerSupported(LayerType::Multiplication,
957 infos,
958 BaseDescriptor(),
959 EmptyOptional(),
960 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000961 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000962}
963
964bool LayerSupportHandle::IsNormalizationSupported(const TensorInfo& input,
965 const TensorInfo& output,
966 const NormalizationDescriptor& descriptor,
967 Optional<std::string&> reasonIfUnsupported)
968{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000969 TensorInfos infos{input, output};
970
971 return m_LayerSupport->IsLayerSupported(LayerType::Normalization,
972 infos,
973 descriptor,
974 EmptyOptional(),
975 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000976 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000977}
978
979bool LayerSupportHandle::IsOutputSupported(const TensorInfo& output,
980 Optional<std::string&> reasonIfUnsupported)
981{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000982 TensorInfos infos{output};
983
984 return m_LayerSupport->IsLayerSupported(LayerType::Output,
985 infos,
986 BaseDescriptor(),
987 EmptyOptional(),
988 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +0000989 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +0000990}
991
992bool LayerSupportHandle::IsPadSupported(const TensorInfo& input,
993 const TensorInfo& output,
994 const PadDescriptor& descriptor,
995 Optional<std::string&> reasonIfUnsupported)
996{
Cathal Corbett34b429c2021-12-24 12:24:40 +0000997 TensorInfos infos{input, output};
998
999 return m_LayerSupport->IsLayerSupported(LayerType::Pad,
1000 infos,
1001 descriptor,
1002 EmptyOptional(),
1003 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001004 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001005}
1006
1007bool LayerSupportHandle::IsPermuteSupported(const TensorInfo& input,
1008 const TensorInfo& output,
1009 const PermuteDescriptor& descriptor,
1010 Optional<std::string&> reasonIfUnsupported)
1011{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001012 TensorInfos infos{input, output};
1013
1014 return m_LayerSupport->IsLayerSupported(LayerType::Permute,
1015 infos,
1016 descriptor,
1017 EmptyOptional(),
1018 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001019 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001020}
1021
1022bool LayerSupportHandle::IsPooling2dSupported(const TensorInfo& input,
1023 const TensorInfo& output,
1024 const Pooling2dDescriptor& descriptor,
1025 Optional<std::string&> reasonIfUnsupported)
1026{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001027 TensorInfos infos{input, output};
1028
1029 return m_LayerSupport->IsLayerSupported(LayerType::Pooling2d,
1030 infos,
1031 descriptor,
1032 EmptyOptional(),
1033 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001034 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001035}
1036
Tamás Nyíri7b885b32021-10-26 14:47:57 +01001037bool LayerSupportHandle::IsPooling3dSupported(const TensorInfo& input,
1038 const TensorInfo& output,
1039 const Pooling3dDescriptor& descriptor,
1040 Optional<std::string&> reasonIfUnsupported)
1041{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001042 TensorInfos infos{input, output};
1043
1044 return m_LayerSupport->IsLayerSupported(LayerType::Pooling3d,
1045 infos,
1046 descriptor,
1047 EmptyOptional(),
1048 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001049 reasonIfUnsupported);
Tamás Nyíri7b885b32021-10-26 14:47:57 +01001050}
1051
Francis Murtagh7909c532021-01-28 14:25:15 +00001052bool LayerSupportHandle::IsPreCompiledSupported(const TensorInfo& input,
1053 const PreCompiledDescriptor& descriptor,
1054 Optional<std::string&> reasonIfUnsupported)
1055{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001056 TensorInfos infos{input};
1057
1058 return m_LayerSupport->IsLayerSupported(LayerType::PreCompiled,
1059 infos,
1060 descriptor,
1061 EmptyOptional(),
1062 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001063 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001064}
1065
1066bool LayerSupportHandle::IsPreluSupported(const TensorInfo& input,
1067 const TensorInfo& alpha,
1068 const TensorInfo& output,
1069 Optional<std::string&> reasonIfUnsupported)
1070{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001071 TensorInfos infos{input, alpha, output};
1072
1073 return m_LayerSupport->IsLayerSupported(LayerType::Prelu,
1074 infos,
1075 BaseDescriptor(),
1076 EmptyOptional(),
1077 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001078 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001079}
1080
1081bool LayerSupportHandle::IsQuantizeSupported(const TensorInfo& input,
1082 const TensorInfo& output,
1083 Optional<std::string&> reasonIfUnsupported)
1084{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001085 TensorInfos infos{input, output};
1086
1087 return m_LayerSupport->IsLayerSupported(LayerType::Quantize,
1088 infos,
1089 BaseDescriptor(),
1090 EmptyOptional(),
1091 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001092 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001093}
1094
1095bool LayerSupportHandle::IsQLstmSupported(const TensorInfo& input,
1096 const TensorInfo& previousOutputIn,
1097 const TensorInfo& previousCellStateIn,
1098 const TensorInfo& outputStateOut,
1099 const TensorInfo& cellStateOut,
1100 const TensorInfo& output,
1101 const QLstmDescriptor& descriptor,
1102 const LstmInputParamsInfo& paramsInfo,
1103 Optional<std::string&> reasonIfUnsupported)
1104{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001105 TensorInfos infos{input, previousOutputIn, previousCellStateIn, outputStateOut, cellStateOut, output};
1106
1107 return m_LayerSupport->IsLayerSupported(LayerType::QLstm,
1108 infos,
Francis Murtagh7909c532021-01-28 14:25:15 +00001109 descriptor,
1110 paramsInfo,
Cathal Corbett34b429c2021-12-24 12:24:40 +00001111 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001112 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001113}
1114
1115bool LayerSupportHandle::IsQuantizedLstmSupported(const TensorInfo& input,
1116 const TensorInfo& previousCellStateIn,
1117 const TensorInfo& previousOutputIn,
1118 const TensorInfo& cellStateOut,
1119 const TensorInfo& output,
1120 const QuantizedLstmInputParamsInfo& paramsInfo,
1121 Optional<std::string&> reasonIfUnsupported)
1122{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001123 TensorInfos infos{input, previousCellStateIn, previousOutputIn, cellStateOut, output};
1124
1125 return m_LayerSupport->IsLayerSupported(LayerType::QuantizedLstm,
1126 infos,
1127 BaseDescriptor(),
1128 EmptyOptional(),
1129 paramsInfo,
Francis Murtaghb28e5252022-02-09 14:36:04 +00001130 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001131}
1132
1133bool LayerSupportHandle::IsRankSupported(const TensorInfo& input,
1134 const TensorInfo& output,
1135 Optional<std::string&> reasonIfUnsupported)
1136{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001137 TensorInfos infos{input, output};
1138
1139 return m_LayerSupport->IsLayerSupported(LayerType::Rank,
1140 infos,
1141 BaseDescriptor(),
1142 EmptyOptional(),
1143 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001144 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001145}
1146
Sadik Armagana2747482021-02-09 10:28:54 +00001147bool LayerSupportHandle::IsReduceSupported(const TensorInfo& input,
1148 const TensorInfo& output,
1149 const ReduceDescriptor& descriptor,
1150 Optional<std::string&> reasonIfUnsupported)
1151{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001152 TensorInfos infos{input, output};
1153
1154 return m_LayerSupport->IsLayerSupported(LayerType::Reduce,
1155 infos,
1156 descriptor,
1157 EmptyOptional(),
1158 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001159 reasonIfUnsupported);
Sadik Armagana2747482021-02-09 10:28:54 +00001160}
1161
Francis Murtagh7909c532021-01-28 14:25:15 +00001162bool LayerSupportHandle::IsReshapeSupported(const TensorInfo& input,
1163 const TensorInfo& output,
1164 const ReshapeDescriptor& descriptor,
1165 Optional<std::string&> reasonIfUnsupported)
1166{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001167 TensorInfos infos{input, output};
1168
1169 return m_LayerSupport->IsLayerSupported(LayerType::Reshape,
1170 infos,
1171 descriptor,
1172 EmptyOptional(),
1173 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001174 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001175}
1176
Francis Murtagh7909c532021-01-28 14:25:15 +00001177bool LayerSupportHandle::IsResizeSupported(const TensorInfo& input,
1178 const TensorInfo& output,
1179 const ResizeDescriptor& descriptor,
1180 Optional<std::string&> reasonIfUnsupported)
1181{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001182 TensorInfos infos{input, output};
1183
1184 return m_LayerSupport->IsLayerSupported(LayerType::Resize,
1185 infos,
1186 descriptor,
1187 EmptyOptional(),
1188 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001189 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001190}
1191
Keith Davis3ae3f972021-05-21 16:33:48 +01001192bool LayerSupportHandle::IsShapeSupported(const TensorInfo& input,
1193 const TensorInfo& output,
1194 Optional<std::string&> reasonIfUnsupported)
1195{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001196 TensorInfos infos{input, output};
1197
1198 return m_LayerSupport->IsLayerSupported(LayerType::Shape,
1199 infos,
1200 BaseDescriptor(),
1201 EmptyOptional(),
1202 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001203 reasonIfUnsupported);
Keith Davis3ae3f972021-05-21 16:33:48 +01001204}
1205
Francis Murtagh7909c532021-01-28 14:25:15 +00001206bool LayerSupportHandle::IsSliceSupported(const TensorInfo& input,
1207 const TensorInfo& output,
1208 const SliceDescriptor& descriptor,
1209 Optional<std::string&> reasonIfUnsupported)
1210{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001211 TensorInfos infos{input, output};
1212
1213 return m_LayerSupport->IsLayerSupported(LayerType::Slice,
1214 infos,
1215 descriptor,
1216 EmptyOptional(),
1217 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001218 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001219}
1220
1221bool LayerSupportHandle::IsSoftmaxSupported(const TensorInfo& input,
1222 const TensorInfo& output,
1223 const SoftmaxDescriptor& descriptor,
1224 Optional<std::string&> reasonIfUnsupported)
1225{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001226 TensorInfos infos{input, output};
1227
1228 return m_LayerSupport->IsLayerSupported(LayerType::Softmax,
1229 infos,
1230 descriptor,
1231 EmptyOptional(),
1232 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001233 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001234}
1235
1236bool LayerSupportHandle::IsSpaceToBatchNdSupported(const TensorInfo& input,
1237 const TensorInfo& output,
1238 const SpaceToBatchNdDescriptor& descriptor,
1239 Optional<std::string&> reasonIfUnsupported)
1240{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001241 TensorInfos infos{input, output};
1242
1243 return m_LayerSupport->IsLayerSupported(LayerType::SpaceToBatchNd,
1244 infos,
1245 descriptor,
1246 EmptyOptional(),
1247 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001248 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001249}
1250
1251bool LayerSupportHandle::IsSpaceToDepthSupported(const TensorInfo& input,
1252 const TensorInfo& output,
1253 const SpaceToDepthDescriptor& descriptor,
1254 Optional<std::string&> reasonIfUnsupported)
1255{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001256 TensorInfos infos{input, output};
1257
1258 return m_LayerSupport->IsLayerSupported(LayerType::SpaceToDepth,
1259 infos,
1260 descriptor,
1261 EmptyOptional(),
1262 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001263 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001264}
1265
1266bool LayerSupportHandle::IsSplitterSupported(const TensorInfo& input,
Francis Murtagh7909c532021-01-28 14:25:15 +00001267 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1268 const ViewsDescriptor& descriptor,
1269 Optional<std::string&> reasonIfUnsupported)
1270{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001271 TensorInfos infos{input};
1272 for (TensorInfo outInfo : outputs)
1273 {
1274 infos.push_back(outInfo);
1275 }
1276
1277 return m_LayerSupport->IsLayerSupported(LayerType::Splitter,
1278 infos,
1279 descriptor,
1280 EmptyOptional(),
1281 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001282 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001283}
1284
1285bool LayerSupportHandle::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1286 const TensorInfo& output,
1287 const StackDescriptor& descriptor,
1288 Optional<std::string&> reasonIfUnsupported)
1289{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001290 TensorInfos infos;
1291 for (const TensorInfo* inputInfo : inputs)
1292 {
1293 infos.push_back(*inputInfo);
1294 }
1295 infos.push_back(output);
1296
1297 return m_LayerSupport->IsLayerSupported(LayerType::Stack,
1298 infos,
1299 descriptor,
1300 EmptyOptional(),
1301 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001302 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001303}
1304
1305bool LayerSupportHandle::IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
1306 const std::vector<const TensorInfo*>& outputs,
1307 const StandInDescriptor& descriptor,
1308 Optional<std::string&> reasonIfUnsupported)
1309{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001310 TensorInfos infos;
1311 for (const TensorInfo* inputInfo : inputs)
1312 {
1313 infos.push_back(*inputInfo);
1314 }
1315 for (const TensorInfo* outputInfo : outputs)
1316 {
1317 infos.push_back(*outputInfo);
1318 }
1319
1320 return m_LayerSupport->IsLayerSupported(LayerType::StandIn,
1321 infos,
1322 descriptor,
1323 EmptyOptional(),
1324 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001325 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001326}
1327
1328
1329bool LayerSupportHandle::IsStridedSliceSupported(const TensorInfo& input,
1330 const TensorInfo& output,
1331 const StridedSliceDescriptor& descriptor,
1332 Optional<std::string&> reasonIfUnsupported)
1333{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001334 TensorInfos infos{input, output};
1335
1336 return m_LayerSupport->IsLayerSupported(LayerType::StridedSlice,
1337 infos,
1338 descriptor,
1339 EmptyOptional(),
1340 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001341 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001342}
1343
1344bool LayerSupportHandle::IsSubtractionSupported(const TensorInfo& input0,
1345 const TensorInfo& input1,
1346 const TensorInfo& output,
1347 Optional<std::string&> reasonIfUnsupported)
1348{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001349 TensorInfos infos{input0, input1, output};
1350
1351 return m_LayerSupport->IsLayerSupported(LayerType::Subtraction,
1352 infos,
1353 BaseDescriptor(),
1354 EmptyOptional(),
1355 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001356 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001357}
1358
1359bool LayerSupportHandle::IsSwitchSupported(const TensorInfo& input0,
1360 const TensorInfo& input1,
1361 const TensorInfo& output0,
1362 const TensorInfo& output1,
1363 Optional<std::string&> reasonIfUnsupported)
1364{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001365 TensorInfos infos{input0, input1, output0, output1};
1366
1367 return m_LayerSupport->IsLayerSupported(LayerType::Switch,
1368 infos,
1369 BaseDescriptor(),
1370 EmptyOptional(),
1371 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001372 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001373}
1374
1375bool LayerSupportHandle::IsTransposeConvolution2dSupported(
1376 const TensorInfo& input,
1377 const TensorInfo& output,
1378 const TransposeConvolution2dDescriptor& descriptor,
1379 const TensorInfo& weights,
1380 const Optional<TensorInfo>& biases,
1381 Optional<std::string&> reasonIfUnsupported)
1382{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001383 TensorInfo biasesVal = biases.has_value() ? biases.value() : TensorInfo();
1384 TensorInfos infos{input, output, weights, biasesVal};
1385
1386 return m_LayerSupport->IsLayerSupported(LayerType::TransposeConvolution2d,
1387 infos,
1388 descriptor,
1389 EmptyOptional(),
1390 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001391 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001392}
1393
1394bool LayerSupportHandle::IsTransposeSupported(const TensorInfo& input,
1395 const TensorInfo& output,
1396 const TransposeDescriptor& descriptor,
1397 Optional<std::string&> reasonIfUnsupported)
1398{
Cathal Corbett34b429c2021-12-24 12:24:40 +00001399 TensorInfos infos{input, output};
1400
1401 return m_LayerSupport->IsLayerSupported(LayerType::Transpose,
1402 infos,
1403 descriptor,
1404 EmptyOptional(),
1405 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001406 reasonIfUnsupported);
Francis Murtagh7909c532021-01-28 14:25:15 +00001407}
1408
Francis Murtagh3570a382022-05-18 11:46:10 +01001409// Forwarding function to maintain ABI stability
1410bool LayerSupportHandle::IsUnidirectionalSequenceLstmSupported(const TensorInfo& input,
1411 const TensorInfo& outputStateIn,
1412 const TensorInfo& cellStateIn,
1413 const TensorInfo& output,
1414 const Optional<TensorInfo>& hiddenStateOutput,
1415 const Optional<TensorInfo>& cellStateOutput,
1416 const LstmDescriptor& descriptor,
1417 const LstmInputParamsInfo& paramsInfo,
1418 Optional<std::string&> reasonIfUnsupported)
1419{
1420 TensorInfo hiddenStateOutputVal = hiddenStateOutput.has_value() ? hiddenStateOutput.value() : TensorInfo();
1421 TensorInfo cellStateOutputVal = cellStateOutput.has_value() ? cellStateOutput.value() : TensorInfo();
1422 TensorInfos infos{input, outputStateIn, cellStateIn, hiddenStateOutputVal, cellStateOutputVal, output};
1423
1424 return IsUnidirectionalSequenceLstmSupported(input,
1425 outputStateIn,
1426 cellStateIn,
1427 hiddenStateOutputVal,
1428 cellStateOutputVal,
1429 output,
1430 descriptor,
1431 paramsInfo,
1432 reasonIfUnsupported);
1433}
1434
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001435bool LayerSupportHandle::IsUnidirectionalSequenceLstmSupported(const TensorInfo& input,
1436 const TensorInfo& outputStateIn,
1437 const TensorInfo& cellStateIn,
Mike Kelly12994962022-04-21 11:57:09 +01001438 const TensorInfo& outputStateOut,
1439 const TensorInfo& cellStateOut,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001440 const TensorInfo& output,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001441 const LstmDescriptor& descriptor,
1442 const LstmInputParamsInfo& paramsInfo,
1443 Optional<std::string&> reasonIfUnsupported)
1444{
Mike Kelly12994962022-04-21 11:57:09 +01001445 TensorInfos infos{input, outputStateIn, cellStateIn, outputStateOut, cellStateOut, output};
Cathal Corbett34b429c2021-12-24 12:24:40 +00001446
1447 return m_LayerSupport->IsLayerSupported(LayerType::UnidirectionalSequenceLstm,
1448 infos,
1449 descriptor,
1450 paramsInfo,
1451 EmptyOptional(),
Francis Murtaghb28e5252022-02-09 14:36:04 +00001452 reasonIfUnsupported);
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001453}
1454
Francis Murtagh7909c532021-01-28 14:25:15 +00001455}