//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <LayerSupportCommon.hpp>
#include <backendsCommon/LayerSupportRules.hpp>

#include <vector>
#include <array>

namespace armnn
{

namespace
{

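// Forwards to IsSupportedForDataTypeGeneric, wiring the supplied Float32 and
// Uint8 predicates into the generic dispatcher and rejecting the remaining
// data-type slots via FalseFunc.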
template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace

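// Each IsXxxSupported query below follows the same pattern: it AND-accumulates
// the results of CheckSupportRule calls into 'supported', appending a failure
// message to 'reasonIfUnsupported' for every rule that does not hold.
//
// Illustrative call sketch (not part of this file; 'inputInfo', 'outputInfo'
// and 'descriptor' are assumed to be prepared by the caller):
//
//     RefLayerSupport layerSupport;
//     std::string reason;
//     bool ok = layerSupport.IsActivationSupported(inputInfo, outputInfo, descriptor,
//                                                  Optional<std::string&>(reason));
//     // On failure, 'ok' is false and 'reason' lists every violated rule.
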
bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    return IsElementwiseUnarySupported(input,
                                       output,
                                       ElementwiseUnaryDescriptor(UnaryOperation::Abs),
                                       reasonIfUnsupported);
}

bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

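    // Local rule: the reference backend implements only the activation
    // functions listed below; any other ActivationFunction is rejected.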
    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::Elu:
                case ActivationFunction::HardSwish:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo &input, const armnn::TensorInfo &output,
                                           const armnn::ArgMinMaxDescriptor &descriptor,
                                           armnn::Optional<std::string &> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32,
        DataType::Signed64
    };

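    // ArgMin/ArgMax produce element indices, so the output is restricted to
    // signed integer index types.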
    std::array<DataType,2> supportedOutputTypes = {
        DataType::Signed32,
        DataType::Signed64
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsCastSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: input is not a supported type");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: output is not a supported type");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference cast: input and output shapes have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsChannelShuffleSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const ChannelShuffleDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported output and inputs types.
    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input and output types are mismatched.");

    return supported;
}


bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::Boolean,
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        ARMNN_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertBf16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: output type not supported");

    return supported;
}

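// The Fp16/Fp32 conversion queries below use the generic per-data-type
// dispatcher: the input must be the source type and the output the destination
// type; every other data type resolves to one of the False* predicates.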
bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvertFp32ToBf16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &FalseInputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseOutputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution2dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: output is not a supported type.");

    // For Convolution2d, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Convolution2d: input and output types mismatched.");
    }

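    // Quantized 8-bit inputs require the weights to be an 8-bit quantized type
    // (per-tensor or the deprecated per-axis variant); otherwise the weights
    // must use the same data type as the input.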
    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsConvolution3dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution3dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Convolution3d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution3d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }

    return supported;

}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedInputTypes = {
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,3> supportedOutputTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,6> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    std::array<DataType, 1> logicalSupportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;

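    // LogicalNot operates on Boolean tensors only; every other unary operation
    // uses the numeric types listed above.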
    if (descriptor.m_Operation == UnaryOperation::LogicalNot)
    {
        supported &= CheckSupportRule(TypeAnyOf(input, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
                                       const TensorInfo& input1,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Equal),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFillSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      const FillDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(output);

    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::Signed32
    };

Ryan OSheaf4bfa6a2020-06-10 11:33:37 +01001028 "Reference Fill: input type not supported.");
1029
Teresa Charlin44088502020-07-27 11:27:19 +01001030 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1031 "Reference Fill: output type not supported.");
Ryan OSheaf4bfa6a2020-06-10 11:33:37 +01001032 return supported;
1033}
1034
arovir011c7c81b2018-10-08 11:34:28 +01001035bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
1036 const TensorInfo& output,
1037 Optional<std::string&> reasonIfUnsupported) const
1038{
Jan Eilers8eb25602020-03-09 12:13:48 +00001039 IgnoreUnused(output);
James Conroy83735b12019-05-30 16:36:59 +01001040 bool supported = true;
1041
Francis Murtaghe8ac1332020-07-30 18:03:40 +01001042 std::array<DataType,3> supportedTypes =
James Conroy83735b12019-05-30 16:36:59 +01001043 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001044 DataType::BFloat16,
James Conroyb40d7102019-06-04 12:32:09 +01001045 DataType::Float32,
Francis Murtaghe8ac1332020-07-30 18:03:40 +01001046 DataType::Float16
James Conroy83735b12019-05-30 16:36:59 +01001047 };
1048
1049 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1050 "Reference Floor: input type not supported.");
1051
1052 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1053 "Reference Floor: output type not supported.");
1054
1055 return supported;
arovir011c7c81b2018-10-08 11:34:28 +01001056}
1057
1058bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
1059 const TensorInfo& output,
1060 const TensorInfo& weights,
1061 const TensorInfo& biases,
1062 const FullyConnectedDescriptor& descriptor,
1063 Optional<std::string&> reasonIfUnsupported) const
1064{
Francis Murtagh46c09d02019-05-28 08:15:28 +01001065 bool supported = true;
1066
1067 // Define supported types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001068 std::array<DataType,6> supportedTypes =
Francis Murtagh46c09d02019-05-28 08:15:28 +01001069 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001070 DataType::BFloat16,
1071 DataType::Float32,
1072 DataType::Float16,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001073 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001074 DataType::QAsymmU8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001075 DataType::QSymmS16
Francis Murtagh46c09d02019-05-28 08:15:28 +01001076 };
1077
1078 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1079 "Reference Fully Connected: input type not supported.");
1080
1081 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1082 "Reference Fully Connected: output type not supported.");
1083
Francis Murtagh46c09d02019-05-28 08:15:28 +01001084 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
1085 "Reference Fully Connected: weights type not supported.");
1086
Narumol Prangnawarat57ef0082020-03-26 09:20:43 +00001087 // For FullyConnected, we allow to have BFloat16 input with Float32 output for optimization.
1088 if (input.GetDataType() == DataType::BFloat16)
1089 {
1090 if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
1091 {
1092 reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
1093 supported = false;
1094 }
1095 }
1096 else
1097 {
1098 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1099 "Reference Fully Connected: input and output types mismatched.");
1100 }
1101
Jan Eilers1f45dc32020-06-15 11:43:03 +01001102 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
1103 "Reference Fully Connected: weights is not a supported type.");
Francis Murtaghddb1d062020-03-10 13:51:45 +00001104
Jan Eilers1f45dc32020-06-15 11:43:03 +01001105 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
1106 "Reference Fully Connected: input and weights types mismatched.");
Francis Murtagh46c09d02019-05-28 08:15:28 +01001107
1108 if (descriptor.m_BiasEnabled)
1109 {
1110 // Defined supported types for bias
Sadik Armagandb73c982020-04-01 17:35:30 +01001111 std::array<DataType, 5>
Francis Murtagh46c09d02019-05-28 08:15:28 +01001112 supportedBiasTypes =
1113 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001114 DataType::BFloat16,
Francis Murtagh46c09d02019-05-28 08:15:28 +01001115 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001116 DataType::Float16,
Sadik Armagandb73c982020-04-01 17:35:30 +01001117 DataType::Signed32,
1118 DataType::QAsymmS8
Francis Murtagh46c09d02019-05-28 08:15:28 +01001119 };
1120
1121 supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
1122 "Reference Fully Connected: bias type not supported.");
1123
1124 supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
1125 "Reference Fully Connected: bias and weight types mismatch.");
1126
1127 supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
1128 "Reference Fully Connected: bias type inferred from weights is incompatible.");
1129
Narumol Prangnawarat366d7232020-04-29 12:58:17 +01001130 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(biases, 1U), reasonIfUnsupported,
1131 "Reference Fully Connected: bias must have 1 dimension.");
1132
Francis Murtagh46c09d02019-05-28 08:15:28 +01001133 }
1134
1135 return supported;
arovir011c7c81b2018-10-08 11:34:28 +01001136}

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        const GatherDescriptor& descriptor,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    if (descriptor.m_Axis != 0)
    {
        reasonIfUnsupported.value() += std::string("Reference Gather: axis not supported\n");
        supported &= false;
    }
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Greater),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 const L2NormalizationDescriptor& descriptor,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1255 "Reference L2normalization: input and output shapes have different "
1256 "num total elements.");
1257
1258 return supported;
arovir011c7c81b2018-10-08 11:34:28 +01001259}
1260
James Conroyaba90cd2020-11-06 16:28:18 +00001261bool RefLayerSupport::IsLogicalBinarySupported(const TensorInfo& input0,
1262 const TensorInfo& input1,
1263 const TensorInfo& output,
1264 const LogicalBinaryDescriptor& descriptor,
1265 Optional<std::string&> reasonIfUnsupported) const
1266{
1267 IgnoreUnused(descriptor);
1268
1269 std::array<DataType, 1> supportedTypes =
1270 {
1271 DataType::Boolean
1272 };
1273
1274 bool supported = true;
1275 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1276 "Reference LogicalBinary: input 0 type not supported");
1277 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1278 "Reference LogicalBinary: input 1 type not supported");
1279
1280 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1281 "Reference LogicalBinary: input and output types do not match");
1282
1283 return supported;
1284}
1285
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001286bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
1287 const TensorInfo& output,
1288 const LogSoftmaxDescriptor& descriptor,
1289 Optional<std::string&> reasonIfUnsupported) const
1290{
Jan Eilers8eb25602020-03-09 12:13:48 +00001291 IgnoreUnused(descriptor);
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001292
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001293 std::array<DataType, 3> supportedTypes =
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001294 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001295 DataType::BFloat16,
1296 DataType::Float32,
1297 DataType::Float16
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001298 };
1299
1300 bool supported = true;
1301 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1302 "Reference LogSoftmax: input type not supported");
1303
1304 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1305 "Reference LogSoftmax: output type not supported");
1306
1307 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1308 "Reference LogSoftmax: input and output types do not match");
1309
1310 return supported;
1311}
1312
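// LSTM support on the reference backend: every state, output and (optional) parameter tensor
// must share the input's data type, which itself must be one of the types listed below.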
arovir011c7c81b2018-10-08 11:34:28 +01001313bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
1314 const TensorInfo& outputStateIn,
1315 const TensorInfo& cellStateIn,
1316 const TensorInfo& scratchBuffer,
1317 const TensorInfo& outputStateOut,
1318 const TensorInfo& cellStateOut,
1319 const TensorInfo& output,
1320 const LstmDescriptor& descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001321 const LstmInputParamsInfo& paramsInfo,
1322 Optional<std::string&> reasonIfUnsupported) const
arovir011c7c81b2018-10-08 11:34:28 +01001323{
Jan Eilers8eb25602020-03-09 12:13:48 +00001324 IgnoreUnused(descriptor);
1325 IgnoreUnused(paramsInfo);
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001326
1327 bool supported = true;
1328
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001329 std::array<DataType,3> supportedTypes = {
1330 DataType::BFloat16,
Conor Kennedyb9971c92019-05-07 07:14:23 +01001331 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001332 DataType::QSymmS16
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001333 };
1334
Jan Eilersd01a83c2019-07-03 18:20:40 +01001335 // check inputs and outputs
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001336 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1337 "Reference Lstm: input is not a supported type.");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001338 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1339 "Reference Lstm: input and outputStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001340 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1341 "Reference Lstm: input and cellStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001342 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1343 "Reference Lstm: input and scratchBuffer types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001344 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1345 "Reference Lstm: input and outputStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001346 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1347 "Reference Lstm: input and cellStateOut types are mismatched");
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01001348
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001349 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1350 "Reference Lstm: input and output types are mismatched");
Jan Eilersd01a83c2019-07-03 18:20:40 +01001351 // check layer parameters
Francis Murtaghbb590b42019-08-14 09:51:36 +01001352 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001353 "Reference Lstm: input and InputToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001354 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001355 "Reference Lstm: input and InputToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001356 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001357 "Reference Lstm: input and InputToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001358 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001359 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001360 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001361 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001362 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001363 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001364 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001365 "Reference Lstm: input and ForgetGateBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001366 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001367 "Reference Lstm: input and CellBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001368 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001369 "Reference Lstm: input and OutputGateBias types are mismatched");
1370 if (!descriptor.m_CifgEnabled)
1371 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001372 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001373 "Reference Lstm: input and InputToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001374 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001375 reasonIfUnsupported,
1376 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001377 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001378 "Reference Lstm: input and InputGateBias types are mismatched");
1379 if (descriptor.m_PeepholeEnabled)
1380 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001381 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001382 reasonIfUnsupported,
1383 "Reference Lstm: input and CellToInputWeights types are mismatched");
1384 }
1385 }
1386 if (descriptor.m_PeepholeEnabled)
1387 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001388 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001389 "Reference Lstm: input and CellToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001390 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001391 "Reference Lstm: input and CellToOutputWeights types are mismatched");
1392 }
1393 if (descriptor.m_ProjectionEnabled)
1394 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001395 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001396                                   "Reference Lstm: input and ProjectionWeights types are mismatched");
1397 if (paramsInfo.m_ProjectionBias != nullptr)
1398 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001399 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001400 "Reference Lstm: input and ProjectionBias types are mismatched");
1401 }
1402 }
1403 if (descriptor.m_LayerNormEnabled)
1404 {
1405 if (!descriptor.m_CifgEnabled)
1406 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001407 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001408 reasonIfUnsupported,
1409 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1410 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001411 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001412 reasonIfUnsupported,
1413 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001414 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001415 reasonIfUnsupported,
1416 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001417 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001418 reasonIfUnsupported,
1419 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1420 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001421
1422 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001423}
1424
saoste012df12b32018-11-28 16:57:20 +00001425bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1426 const TensorInfo& input1,
1427 const TensorInfo& output,
1428 Optional<std::string&> reasonIfUnsupported) const
1429{
Sadik Armagan2999a022019-04-09 14:20:12 +01001430 bool supported = true;
1431
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001432 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001433 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001434 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001435 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001436 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001437 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001438 DataType::QSymmS16,
1439 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001440 };
1441
1442 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1443 "Reference maximum: input 0 is not a supported type.");
1444
1445 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1446 "Reference maximum: input 1 is not a supported type.");
1447
1448 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1449 "Reference maximum: output is not a supported type.");
1450
1451 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1452                                   "Reference maximum: input 0 and input 1 types are mismatched");
1453
1454 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1455 "Reference maximum: input and output types are mismatched");
1456
1457 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1458 "Reference maximum: shapes are not suitable for implicit broadcast.");
1459
1460 return supported;
saoste012df12b32018-11-28 16:57:20 +00001461}
1462
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001463bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1464 const TensorInfo& output,
1465 const MeanDescriptor& descriptor,
1466 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001467{
James Conroy4d1ff582019-06-10 17:06:39 +01001468 bool supported = true;
1469 std::string meanLayerStr = "Mean";
1470 std::string outputTensorStr = "output";
1471
Sadik Armagan303980c2020-04-17 12:45:14 +01001472 std::array<DataType,6> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001473 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001474 DataType::BFloat16,
James Conroy4d1ff582019-06-10 17:06:39 +01001475 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001476 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001477 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001478 DataType::QAsymmU8,
1479 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001480 };
1481
1482 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1483 "Reference Mean: input type not supported.");
1484
1485 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1486 "Reference Mean: input and output types are mismatched");
1487
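    // Validate the expected output rank: KeepDims preserves the input rank, an empty axis list
    // reduces the tensor to a single dimension, and otherwise each reduced axis removes one
    // dimension (falling back to rank 1 if every dimension is reduced).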
1488 if (descriptor.m_KeepDims)
1489 {
1490 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1491 reasonIfUnsupported,
1492 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1493 output.GetNumDimensions(),
1494 meanLayerStr, outputTensorStr).data());
1495 }
1496 else if (descriptor.m_Axis.empty())
1497 {
1498 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1499 reasonIfUnsupported,
1500 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1501 meanLayerStr, outputTensorStr).data());
1502 }
1503 else
1504 {
Matthew Sloyan171214c2020-09-09 09:07:37 +01001505 auto outputDim = input.GetNumDimensions() - armnn::numeric_cast<unsigned int>(descriptor.m_Axis.size());
James Conroy4d1ff582019-06-10 17:06:39 +01001506
1507 if (outputDim > 0)
1508 {
1509 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1510 reasonIfUnsupported,
1511 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1512 meanLayerStr, outputTensorStr).data());
1513 }
1514 else
1515 {
1516 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1517 reasonIfUnsupported,
1518 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1519 meanLayerStr, outputTensorStr).data());
1520 }
1521 }
1522
1523 return supported;
narpra0132b90462018-09-13 11:07:48 +01001524}
1525
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001526bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
Nikhil Raj8599a412018-11-19 14:51:07 +00001527 const TensorInfo& output,
Jim Flynne242f2d2019-05-22 14:24:13 +01001528 const MergerDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001529 Optional<std::string&> reasonIfUnsupported) const
1530{
Jim Flynne242f2d2019-05-22 14:24:13 +01001531 return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001532}
1533
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001534bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1535 const TensorInfo &output,
1536 Optional<std::string &> reasonIfUnsupported) const
1537{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001538 bool supported = true;
1539
Sadik Armagan303980c2020-04-17 12:45:14 +01001540 std::array<DataType,7> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001541 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001542 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001543 DataType::Float32,
1544 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001545 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001546 DataType::QAsymmU8,
1547 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001548 DataType::Boolean
1549 };
1550
1551 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1552 "Reference MemCopy: input type not supported");
1553
1554 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1555 "Reference MemCopy: output type not supported");
1556
1557 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1558 "Reference MemCopy: input and output types are mismatched");
1559
1560 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001561}
1562
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001563bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1564 const TensorInfo& input1,
1565 const TensorInfo& output,
1566 Optional<std::string&> reasonIfUnsupported) const
1567{
Sadik Armagan2999a022019-04-09 14:20:12 +01001568 bool supported = true;
1569
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001570 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001571 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001572 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001573 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001574 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001575 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001576 DataType::QSymmS16,
1577 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001578 };
1579
1580 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1581 "Reference minimum: input 0 is not a supported type.");
1582
1583 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1584 "Reference minimum: input 1 is not a supported type.");
1585
1586 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1587 "Reference minimum: output is not a supported type.");
1588
1589 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1590                                   "Reference minimum: input 0 and input 1 types are mismatched");
1591
1592 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1593 "Reference minimum: input and output types are mismatched");
1594
1595 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1596 "Reference minimum: shapes are not suitable for implicit broadcast.");
1597
1598 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001599}
1600
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001601bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1602 const TensorInfo& input1,
1603 const TensorInfo& output,
1604 Optional<std::string&> reasonIfUnsupported) const
1605{
Sadik Armagan2999a022019-04-09 14:20:12 +01001606 bool supported = true;
1607
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001608 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001609 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001610 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001611 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001612 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001613 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001614 DataType::QSymmS16,
1615 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001616 };
1617
1618 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1619 "Reference multiplication: input 0 is not a supported type.");
1620
1621 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1622 "Reference multiplication: input 1 is not a supported type.");
1623
1624 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1625 "Reference multiplication: output is not a supported type.");
1626
1627 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1628                                   "Reference multiplication: input 0 and input 1 types are mismatched");
1629
1630 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1631 "Reference multiplication: input and output types are mismatched");
1632
1633 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1634 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1635
1636 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001637}
1638
1639bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1640 const TensorInfo& output,
1641 const NormalizationDescriptor& descriptor,
1642 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001643{
Jan Eilers8eb25602020-03-09 12:13:48 +00001644 IgnoreUnused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001645
1646 // Define supported types
Sadik Armagan303980c2020-04-17 12:45:14 +01001647 std::array<DataType, 6> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001648 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001649 DataType::BFloat16,
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001650 DataType::Float16,
1651 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001652 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001653 DataType::QAsymmU8,
1654 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001655 };
1656
1657 bool supported = true;
1658
1659 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1660 "Reference normalization: input type not supported.");
1661
1662 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1663 "Reference normalization: output type not supported.");
1664
1665 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1666 "Reference normalization: input and output shapes have different "
1667 "num total elements.");
1668
1669 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001670}
1671
Derek Lamberti901ea112019-12-10 22:07:09 +00001672bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1673 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001674{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001675 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001676}
1677
1678bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1679 const TensorInfo& output,
1680 const PadDescriptor& descriptor,
1681 Optional<std::string&> reasonIfUnsupported) const
1682{
Jan Eilers8eb25602020-03-09 12:13:48 +00001683 IgnoreUnused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001684 bool supported = true;
1685
1686 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001687 std::array<DataType,6> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001688 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001689 DataType::BFloat16,
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001690 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001691 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001692 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001693 DataType::QAsymmU8,
1694 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001695 };
1696
1697 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1698 "Reference pad: input is not a supported type.");
1699
1700 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1701 "Reference pad: output is not a supported type.");
1702
1703 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1704 "Reference pad: input and output types are mismatched.");
1705
1706 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001707}
1708
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001709bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1710 const TensorInfo& output,
1711 const PermuteDescriptor& descriptor,
1712 Optional<std::string&> reasonIfUnsupported) const
1713{
Jan Eilers8eb25602020-03-09 12:13:48 +00001714 IgnoreUnused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001715 bool supported = true;
1716
1717 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001718 std::array<DataType, 6> supportedTypes =
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001719 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001720 DataType::BFloat16,
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001721 DataType::Float32,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001722 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001723 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001724 DataType::QAsymmU8,
1725 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001726 };
1727
1728 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1729 "Reference permute: input is not a supported type.");
1730
1731 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1732 "Reference permute: output is not a supported type.");
1733
1734 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1735 "Reference permute: input and output types are mismatched.");
1736
1737 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001738}
1739
1740bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1741 const TensorInfo& output,
1742 const Pooling2dDescriptor& descriptor,
1743 Optional<std::string&> reasonIfUnsupported) const
1744{
Jan Eilers8eb25602020-03-09 12:13:48 +00001745 IgnoreUnused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001746 bool supported = true;
1747
1748 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001749 std::array<DataType,6> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001750 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001751 DataType::BFloat16,
Teresa Charlina3b20472019-06-06 11:12:32 +01001752 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001753 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001754 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001755 DataType::QAsymmU8,
1756 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001757 };
1758
1759 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1760                                   "Reference pooling2d: input is not a supported type.");
1761
1762 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1763                                   "Reference pooling2d: output is not a supported type.");
1764
1765 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1766                                   "Reference pooling2d: input and output types are mismatched.");
1767
1768 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001769}
1770
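// QLstm: all arguments are intentionally ignored and support is reported unconditionally here;
// detailed type and parameter validation is presumably left to the QLstm workload itself.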
James Conroy4f1f8992020-04-29 20:01:10 +01001771bool RefLayerSupport::IsQLstmSupported(const TensorInfo& input,
1772 const TensorInfo& previousOutputIn,
1773 const TensorInfo& previousCellStateIn,
1774 const TensorInfo& outputStateOut,
1775 const TensorInfo& cellStateOut,
1776 const TensorInfo& output,
1777 const QLstmDescriptor& descriptor,
1778 const LstmInputParamsInfo& paramsInfo,
1779 Optional<std::string&> reasonIfUnsupported) const
1780{
1781 IgnoreUnused(input);
1782 IgnoreUnused(previousOutputIn);
1783 IgnoreUnused(previousCellStateIn);
1784 IgnoreUnused(outputStateOut);
1785 IgnoreUnused(cellStateOut);
1786 IgnoreUnused(output);
1787 IgnoreUnused(descriptor);
1788 IgnoreUnused(paramsInfo);
1789
1790 IgnoreUnused(reasonIfUnsupported);
1791
1792 return true;
1793}
1794
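// Quantize: the input may be a float type or an already-quantized type, while the output must be
// one of the quantized types; unlike most layers, the input and output types are not required to
// match, only their total number of elements.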
Derek Lamberti5f400d62019-03-25 15:41:58 +00001795bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1796 const TensorInfo& output,
1797 Optional<std::string&> reasonIfUnsupported) const
1798{
1799 bool supported = true;
1800
Finn Williamsfd271062019-12-04 14:27:27 +00001801 // Define supported input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001802 std::array<DataType,7> supportedInputTypes = {
1803 DataType::BFloat16,
Keith Davis5e51cd82020-01-29 16:52:59 +00001804 DataType::Float32,
Keith Davis3d8bc972020-02-04 09:31:47 +00001805 DataType::Float16,
Ryan OShea9add1202020-02-07 10:06:33 +00001806 DataType::QAsymmS8,
Keith Davis5e51cd82020-01-29 16:52:59 +00001807 DataType::QAsymmU8,
1808 DataType::QSymmS8,
1809 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001810 };
1811
1812 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1813 "Reference quantize: input type not supported.");
1814
1815 // Define supported output types.
Ryan OShea9add1202020-02-07 10:06:33 +00001816 std::array<DataType,4> supportedOutputTypes = {
Ryan OShea9add1202020-02-07 10:06:33 +00001817 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001818 DataType::QAsymmU8,
Finn Williamsfd271062019-12-04 14:27:27 +00001819 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001820 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001821 };
1822 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1823 "Reference quantize: output type not supported.");
1824
1825 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1826 "Reference quantize: input and output shapes have different num total elements.");
1827
1828 return supported;
1829}
1830
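// Rank produces a Signed32 output regardless of the input type, so only the output type is
// checked and the input is deliberately ignored.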
Finn Williams2605b232020-06-10 15:53:46 +01001831bool RefLayerSupport::IsRankSupported(const TensorInfo& input,
1832 const TensorInfo& output,
1833 Optional<std::string&> reasonIfUnsupported) const
1834{
1835 IgnoreUnused(input);
1836 // Define supported output types.
1837 std::array<DataType,1> supportedOutputTypes =
1838 {
1839 DataType::Signed32,
1840 };
1841
1842 return CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1843                                   "Reference rank: output type not supported.");
1844}
1845
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001846bool RefLayerSupport::IsReduceSupported(const TensorInfo& input,
1847 const TensorInfo& output,
1848 const ReduceDescriptor& descriptor,
1849 Optional<std::string&> reasonIfUnsupported) const
1850{
1851 IgnoreUnused(descriptor);
1852 bool supported = true;
1853 std::array<DataType,7> supportedTypes =
1854 {
1855 DataType::BFloat16,
1856 DataType::Float32,
1857 DataType::Float16,
1858 DataType::QAsymmS8,
1859 DataType::QAsymmU8,
1860 DataType::QSymmS16,
1861 DataType::Signed32
1862 };
1863
1864 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1865 "Reference Reduce: input type not supported");
1866
1867 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1868 "Reference Reduce: output type not supported");
1869
1870 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1871 "Reference Reduce: input and output types not matching");
1872
1873 return supported;
1874}
1875
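// Reshape never changes the data type, so only the input type is validated; the output
// TensorInfo and the descriptor are not inspected here.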
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001876bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001877 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001878 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001879 Optional<std::string&> reasonIfUnsupported) const
1880{
Jan Eilers8eb25602020-03-09 12:13:48 +00001881 IgnoreUnused(output);
1882 IgnoreUnused(descriptor);
Nina Drozd2f2778f2019-05-27 10:37:05 +01001883    // Define supported input types.
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001884    std::array<DataType,8> supportedTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001885 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001886 DataType::BFloat16,
Nina Drozd2f2778f2019-05-27 10:37:05 +01001887 DataType::Float32,
1888 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001889 DataType::Signed32,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001890 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001891 DataType::QAsymmU8,
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001892 DataType::QSymmS16,
1893 DataType::Boolean
Nina Drozd2f2778f2019-05-27 10:37:05 +01001894 };
Keith Davis0c2eeac2020-02-11 16:51:50 +00001895
Nina Drozd2f2778f2019-05-27 10:37:05 +01001896    return CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1897 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001898}
1899
1900bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
Sadik Armaganc625f002018-12-17 11:32:16 +00001901 const TensorInfo& output,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001902 Optional<std::string&> reasonIfUnsupported) const
1903{
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001904 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001905 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001906 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001907 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001908 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001909 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001910 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001911 DataType::QAsymmU8,
1912 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001913 };
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001914
1915 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1916 "Reference ResizeBilinear: input type not supported");
1917
1918 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1919 "Reference ResizeBilinear: output type not supported");
1920
1921 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1922 "Reference ResizeBilinear: input and output types not matching");
1923
1924 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001925}
1926
Teresa Charlin970f43b2019-07-01 13:51:07 +01001927bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1928 const TensorInfo& output,
1929 const ResizeDescriptor& descriptor,
1930 Optional<std::string&> reasonIfUnsupported) const
1931{
Jan Eilers8eb25602020-03-09 12:13:48 +00001932 IgnoreUnused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001933 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001934 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001935 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001936 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001937 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001938 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001939 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001940 DataType::QAsymmU8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001941 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001942 };
1943
1944 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1945 "Reference Resize: input type not supported");
1946
1947 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1948 "Reference Resize: output type not supported");
1949
1950 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1951 "Reference Resize: input and output types not matching");
1952
1953 return supported;
1954}
1955
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001956bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1957 const TensorInfo& output,
1958 Optional<std::string&> reasonIfUnsupported) const
1959{
josh minor4a3c6102020-01-06 16:40:46 -06001960 return IsElementwiseUnarySupported(input,
1961 output,
1962 ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
1963 reasonIfUnsupported);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001964}
1965
Keith Davis3ae3f972021-05-21 16:33:48 +01001966bool RefLayerSupport::IsShapeSupported(const TensorInfo& input,
1967 const TensorInfo& output,
1968 Optional<std::string&> reasonIfUnsupported) const
1969{
1970 IgnoreUnused(input);
1971 bool supported = true;
1972
1973 std::array<DataType, 1> supportedTypes =
1974 {
1975 DataType::Signed32
1976 };
1977
1978 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1979 "Reference Shape: output type not supported");
1980
1981 return supported;
1982}
1983
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001984bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1985 const TensorInfo& output,
1986 const SliceDescriptor& descriptor,
1987 Optional<std::string&> reasonIfUnsupported) const
1988{
Jan Eilers8eb25602020-03-09 12:13:48 +00001989 IgnoreUnused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001990 bool supported = true;
1991
Sadik Armagan303980c2020-04-17 12:45:14 +01001992 std::array<DataType, 5> supportedTypes =
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001993 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001994 DataType::BFloat16,
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001995 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001996 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001997 DataType::QAsymmU8,
1998 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001999 };
2000
2001 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2002 "Reference Slice: input type not supported");
2003
2004 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2005 "Reference Slice: output type not supported");
2006
2007 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2008 "Reference Slice: input and output types are mismatched");
2009
2010 return supported;
2011}
2012
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002013bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
2014 const TensorInfo& output,
2015 const SoftmaxDescriptor& descriptor,
2016 Optional<std::string&> reasonIfUnsupported) const
2017{
Jan Eilers8eb25602020-03-09 12:13:48 +00002018 IgnoreUnused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01002019 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002020 std::array<DataType,7> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01002021 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002022 DataType::BFloat16,
2023 DataType::Float32,
2024 DataType::Float16,
2025 DataType::QSymmS8,
2026 DataType::QAsymmS8,
2027 DataType::QAsymmU8,
2028 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01002029 };
2030
2031 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01002032                                  "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01002033
2034 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01002035                                  "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01002036
2037 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01002038                                  "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01002039
2040 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002041}
2042
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00002043bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
2044 const TensorInfo& output,
2045 const SpaceToBatchNdDescriptor& descriptor,
2046 Optional<std::string&> reasonIfUnsupported) const
2047{
Jan Eilers8eb25602020-03-09 12:13:48 +00002048 IgnoreUnused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01002049 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002050 std::array<DataType,6> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01002051 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002052 DataType::BFloat16,
2053 DataType::Float32,
2054 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002055 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002056 DataType::QAsymmU8,
2057 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01002058 };
2059
2060 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2061 "Reference SpaceToBatchNd: input type not supported");
2062
2063 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2064 "Reference SpaceToBatchNd: output type not supported");
2065
2066 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2067 "Reference SpaceToBatchNd: input and output types are mismatched");
2068
2069 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00002070}
2071
Keith Davisa57eccb2019-06-14 17:33:22 +01002072bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01002073 const TensorInfo& output,
2074 const SpaceToDepthDescriptor& descriptor,
2075 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01002076{
2077
Jan Eilers8eb25602020-03-09 12:13:48 +00002078 IgnoreUnused(descriptor);
Keith Davisa57eccb2019-06-14 17:33:22 +01002079 bool supported = true;
2080
Sadik Armagan303980c2020-04-17 12:45:14 +01002081 std::array<DataType,6> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01002082 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002083 DataType::BFloat16,
Keith Davisa57eccb2019-06-14 17:33:22 +01002084 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002085 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002086 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002087 DataType::QAsymmU8,
2088 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01002089 };
2090
2091 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2092 "Reference SpaceToDepth: input type not supported");
2093
2094 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2095 "Reference SpaceToDepth: output type not supported");
2096
2097 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2098 "Reference SpaceToDepth: input and output types are mismatched");
2099
2100 return supported;
2101}
2102
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002103bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
2104 const ViewsDescriptor& descriptor,
2105 Optional<std::string&> reasonIfUnsupported) const
2106{
Jan Eilers8eb25602020-03-09 12:13:48 +00002107 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002108 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002109 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002110 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002111 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002112 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002113 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002114 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002115 DataType::QAsymmU8,
2116 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002117 };
2118
2119 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2120 "Reference splitter: input type not supported");
2121
2122 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002123}
2124
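// Splitter overload taking explicit output TensorInfos: the input must be a supported type and
// every output must use that same type.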
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002125bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
2126 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
2127 const ViewsDescriptor& descriptor,
2128 Optional<std::string&> reasonIfUnsupported) const
2129{
Jan Eilers8eb25602020-03-09 12:13:48 +00002130 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002131 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002132 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002133 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002134 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002135 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002136 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002137 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002138 DataType::QAsymmU8,
2139 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002140 };
2141
2142 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2143                                   "Reference splitter: input type not supported");
Derek Lambertieac4adb2020-08-25 13:05:59 +01002144 for (const TensorInfo& output : outputs)
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002145 {
2146        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2147                                      "Reference splitter: output type not supported");
2148
2149 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2150 "Reference splitter: input and output types mismatched.");
2151 }
2152
2153 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002154}
2155
Matthew Jackson81e601c2019-07-11 12:07:09 +01002156bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
2157 const TensorInfo& output,
2158 const StackDescriptor& descriptor,
2159 Optional<std::string&> reasonIfUnsupported) const
2160{
Jan Eilers8eb25602020-03-09 12:13:48 +00002161 IgnoreUnused(descriptor);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002162
2163 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002164 std::array<DataType,6> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01002165 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002166 DataType::BFloat16,
Matthew Jackson81e601c2019-07-11 12:07:09 +01002167 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01002168 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002169 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002170 DataType::QAsymmU8,
2171 DataType::QSymmS16
Matthew Jackson81e601c2019-07-11 12:07:09 +01002172 };
2173
2174 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2175 "Reference stack: output type not supported");
2176 for (const TensorInfo* input : inputs)
2177 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002178 ARMNN_ASSERT(input != nullptr);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002179 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
2180 "Reference stack: input type not supported");
2181
2182 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
2183 "Reference stack: input and output types mismatched.");
2184 }
2185
2186 return supported;
2187}
2188
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002189bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
2190 const TensorInfo& output,
2191 const StridedSliceDescriptor& descriptor,
2192 Optional<std::string&> reasonIfUnsupported) const
2193{
Jan Eilers8eb25602020-03-09 12:13:48 +00002194 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002195 bool supported = true;
2196
Sadik Armagan303980c2020-04-17 12:45:14 +01002197 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002198 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002199 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002200 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01002201 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002202 DataType::QAsymmU8,
2203 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002204 };
2205
2206 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2207 "Reference StridedSlice: input type not supported");
2208
2209 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2210 "Reference StridedSlice: output type not supported");
2211
2212 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2213 "Reference StridedSlice: input and output types are mismatched");
2214
2215 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002216}
2217
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002218bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
2219 const TensorInfo& input1,
2220 const TensorInfo& output,
2221 Optional<std::string&> reasonIfUnsupported) const
2222{
Sadik Armagan2999a022019-04-09 14:20:12 +01002223 bool supported = true;
2224
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002225 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002226 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01002227 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002228 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002229 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002230 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002231 DataType::QSymmS16,
2232 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01002233 };
2234
2235 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
2236 "Reference subtraction: input 0 is not a supported type.");
2237
2238 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
2239 "Reference subtraction: input 1 is not a supported type.");
2240
2241 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2242 "Reference subtraction: output is not a supported type.");
2243
2244 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
2245                                   "Reference subtraction: input 0 and input 1 types are mismatched");
2246
2247 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
2248 "Reference subtraction: input and output types are mismatched");
2249
2250 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
2251 "Reference subtraction: shapes are not suitable for implicit broadcast.");
2252
2253 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002254}
2255
bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
                                       const TensorInfo& alpha,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 6> supportedTypes
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "PReLU: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
                                  "PReLU: alpha is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "PReLU: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
                                  "PReLU: input, alpha and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
                                  "PReLU: shapes are not suitable for implicit broadcast");

    return supported;
}

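// Checks whether the reference backend can execute a TransposeConvolution2d layer.
// For quantized inputs the weights may use a (possibly per-axis) 8-bit quantized type;
// otherwise the weights must match the input type. Biases, when present, must be a
// floating-point or Signed32 type.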
bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
                                                        const TensorInfo& output,
                                                        const TransposeConvolution2dDescriptor& descriptor,
                                                        const TensorInfo& weights,
                                                        const Optional<TensorInfo>& biases,
                                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference TransposeConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference TransposeConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference TransposeConvolution2d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // Deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference TransposeConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference TransposeConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference TransposeConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference TransposeConvolution2d: biases is not a supported type.");
    }

    return supported;
}

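// Checks whether the reference backend can execute a Transpose (permute) layer:
// input and output must share one of the supported data types.
//
// Minimal usage sketch (hypothetical shapes and permutation, not taken from this file):
//
//     RefLayerSupport layerSupport;
//     TensorInfo input(TensorShape({ 2, 3 }), DataType::Float32);
//     TensorInfo output(TensorShape({ 3, 2 }), DataType::Float32);
//     TransposeDescriptor descriptor(PermutationVector({ 1, 0 }));
//     std::string reason;
//     bool ok = layerSupport.IsTransposeSupported(input, output, descriptor,
//                                                 Optional<std::string&>(reason));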
bool RefLayerSupport::IsTransposeSupported(const TensorInfo& input,
                                           const TensorInfo& output,
                                           const TransposeDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported input and output types.
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference transpose: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference transpose: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference transpose: input and output types are mismatched.");

    return supported;
}

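// Checks whether the reference backend can execute a UnidirectionalSequenceLstm layer.
// Only Float32 activations are handled; weights may be Float32 or QAsymmS8. The optional
// hidden state and cell state outputs are not supported by this backend.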
bool RefLayerSupport::IsUnidirectionalSequenceLstmSupported(
    const TensorInfo& input,
    const TensorInfo& outputStateIn,
    const TensorInfo& cellStateIn,
    const TensorInfo& output,
    const Optional<TensorInfo>& hiddenStateOutput,
    const Optional<TensorInfo>& cellStateOutput,
    const UnidirectionalSequenceLstmDescriptor& descriptor,
    const LstmInputParamsInfo& paramsInfo,
    Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    if (hiddenStateOutput.has_value() || cellStateOutput.has_value())
    {
        supported = false;
        if (reasonIfUnsupported.has_value())
        {
            reasonIfUnsupported.value() += "Reference UnidirectionalSequenceLstm: hidden state output "
                                           "and cell state output are not supported at the moment.";
        }
    }

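    // Input/output tensors must be Float32; weight tensors may additionally be QAsymmS8.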
    std::array<DataType, 1> supportedTypes =
    {
        DataType::Float32
    };

    std::array<DataType, 2> supportedWeightTypes =
    {
        DataType::Float32,
        DataType::QAsymmS8
    };

    // check inputs and outputs
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input is not a supported type.");
    supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and outputStateIn types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and cellStateIn types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and output types are mismatched");
    // check layer parameters
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToForgetWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToForgetWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToCellWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToCellWeights is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToOutputWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToOutputWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToForgetWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToForgetWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToCellWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToCellWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToOutputWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToOutputWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and ForgetGateBias types "
                                  "are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and CellBias types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and OutputGateBias types "
                                  "are mismatched");
    if (!descriptor.m_CifgEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToInputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: InputToInputWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToInputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: RecurrentToInputWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: input and InputGateBias types "
                                      "are mismatched");
        if (descriptor.m_PeepholeEnabled)
        {
            supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToInputWeights(), supportedWeightTypes),
                                          reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: CellToInputWeights "
                                          "is not a supported type.");
        }
    }
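    // Peephole connections feed the cell state directly into the gates via extra weight vectors.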
    if (descriptor.m_PeepholeEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToForgetWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellToForgetWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToOutputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellToOutputWeights "
                                      "is not a supported type.");
    }
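    // The optional projection layer maps the LSTM output back to the output state size.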
    if (descriptor.m_ProjectionEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetProjectionWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: ProjectionWeights "
                                      "is not a supported type.");
        if (paramsInfo.m_ProjectionBias != nullptr)
        {
            supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: input and ProjectionBias types "
                                          "are mismatched");
        }
    }
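    // Layer normalisation adds per-gate normalisation weight vectors.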
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputLayerNormWeights(), supportedWeightTypes),
                                          reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: InputLayerNormWeights "
                                          "is not a supported type.");
        }
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetForgetLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: ForgetLayerNormWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellLayerNormWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetOutputLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: OutputLayerNormWeights "
                                      "is not a supported type.");
    }

    return supported;
}

} // namespace armnn