//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <LayerSupportCommon.hpp>
#include <backendsCommon/LayerSupportRules.hpp>

#include <vector>
#include <array>

namespace armnn
{

namespace
{

template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace

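// Each Is<Layer>Supported query below follows the same pattern: the result of every
// CheckSupportRule call is AND-ed into 'supported', so every reason for rejection gets
// appended to 'reasonIfUnsupported' instead of stopping at the first failure.
// A minimal sketch of a custom rule, assuming the Rule base class declared in
// backendsCommon/LayerSupportRules.hpp exposes the public 'm_Res' flag that
// ActivationFunctionSupported below also relies on (the rule name and check are
// hypothetical, for illustration only):
//
//     struct RankIsAtLeastTwo : public Rule
//     {
//         explicit RankIsAtLeastTwo(const TensorInfo& info)
//         {
//             m_Res = info.GetNumDimensions() >= 2;
//         }
//     };
//
//     supported &= CheckSupportRule(RankIsAtLeastTwo(input), reasonIfUnsupported,
//                                   "Reference example: input must have rank of at least 2.");
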
bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::Elu:
                case ActivationFunction::HardSwish:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo& input,
                                           const armnn::TensorInfo& output,
                                           const armnn::ArgMinMaxDescriptor& descriptor,
                                           armnn::Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32,
        DataType::Signed64
    };

    std::array<DataType,2> supportedOutputTypes = {
        DataType::Signed32,
        DataType::Signed64
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsCastSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    // Array size matches the number of initializers; a larger size would silently add a
    // value-initialized (duplicate) entry.
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: input is not a supported type");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: output is not a supported type");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference cast: input and output shapes have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsChannelShuffleSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const ChannelShuffleDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported output and inputs types.
    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input and output types are mismatched.");

    return supported;
}

bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::Boolean,
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        ARMNN_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertBf16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}
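// The Fp16/Fp32 converter queries use the older functor dispatch from
// LayerSupportCommon.hpp rather than CheckSupportRule: IsSupportedForDataTypeGeneric
// appears to take one functor per data type (Float16, Float32, U8, Signed32 and Boolean,
// judging by the TrueFunc/False*Func argument names), where TrueFunc<> accepts and the
// False*Func<> helpers reject and append a reason. The net effect for the query above is
// that only a Float16 input combined with a Float32 output is accepted.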
478
Narumol Prangnawaratea54a012020-03-16 16:36:10 +0000479bool RefLayerSupport::IsConvertFp32ToBf16Supported(const TensorInfo& input,
480 const TensorInfo& output,
481 Optional<std::string&> reasonIfUnsupported) const
482{
483 bool supported = true;
484
485 supported &= CheckSupportRule(TypeIs(input, DataType::Float32), reasonIfUnsupported,
486 "Reference for ConvertFp32ToBf16 layer: input type not supported");
487
488 supported &= CheckSupportRule(TypeIs(output, DataType::BFloat16), reasonIfUnsupported,
489 "Reference for ConvertFp32ToBf16 layer: output type not supported");
490
491 return supported;
492}
493
arovir011c7c81b2018-10-08 11:34:28 +0100494bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
495 const TensorInfo& output,
496 Optional<std::string&> reasonIfUnsupported) const
497{
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +0100498 return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
499 input.GetDataType(),
500 &FalseInputFuncF16<>,
501 &TrueFunc<>,
narpra01db2b1602019-01-23 15:23:11 +0000502 &FalseFuncU8<>,
kevmay012b4d88e2019-01-24 14:05:09 +0000503 &FalseFuncI32<>,
504 &FalseFuncU8<>) &&
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +0100505 IsSupportedForDataTypeGeneric(reasonIfUnsupported,
506 output.GetDataType(),
507 &TrueFunc<>,
508 &FalseOutputFuncF32<>,
narpra01db2b1602019-01-23 15:23:11 +0000509 &FalseFuncU8<>,
kevmay012b4d88e2019-01-24 14:05:09 +0000510 &FalseFuncI32<>,
511 &FalseFuncU8<>));
arovir011c7c81b2018-10-08 11:34:28 +0100512}
513
514bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
515 const TensorInfo& output,
516 const Convolution2dDescriptor& descriptor,
517 const TensorInfo& weights,
518 const Optional<TensorInfo>& biases,
519 Optional<std::string&> reasonIfUnsupported) const
520{
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100521 bool supported = true;
522
523 // Define supported types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000524 std::array<DataType,7> supportedTypes =
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000525 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000526 DataType::BFloat16,
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000527 DataType::Float32,
528 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +0000529 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +0100530 DataType::QAsymmU8,
Keith Davis5204aa82020-01-27 15:24:59 +0000531 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +0000532 DataType::QSymmS16
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100533 };
534
535 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000536 "Reference Convolution2d: input is not a supported type.");
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100537
538 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000539 "Reference Convolution2d: output is not a supported type.");
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100540
Narumol Prangnawarat57ef0082020-03-26 09:20:43 +0000541 // For Convolution2d, we allow to have BFloat16 input with Float32 output for optimization.
542 if (input.GetDataType() == DataType::BFloat16)
543 {
544 if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
545 {
546 reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
547 supported = false;
548 }
549 }
550 else
551 {
552 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000553 "Reference Convolution2d: input and output types mismatched.");
Narumol Prangnawarat57ef0082020-03-26 09:20:43 +0000554 }
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100555
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000556 const DataType inputType = input.GetDataType();
Keith Davis0c2eeac2020-02-11 16:51:50 +0000557 if (IsQuantized8BitType(inputType))
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000558 {
Jan Eilers1b2654f2021-09-24 15:45:46 +0100559 std::array<DataType, 3> supportedWeightTypes =
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000560 {
Sadik Armagan303980c2020-04-17 12:45:14 +0100561 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +0000562 DataType::QAsymmU8,
Jan Eilers1b2654f2021-09-24 15:45:46 +0100563 DataType::QSymmS8
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000564 };
565
566 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000567 "Reference Convolution2d: weights type not supported for quantized input.");
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000568 }
569 else
570 {
571 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000572 "Reference Convolution2d: weights is not a supported type.");
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000573
574 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000575 "Reference Convolution2d: input and weights types mismatched.");
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000576 }
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100577
578 if (biases.has_value())
579 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000580 std::array<DataType,4> biasesSupportedTypes =
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000581 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000582 DataType::BFloat16,
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000583 DataType::Float32,
584 DataType::Float16,
585 DataType::Signed32
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100586 };
Aron Virginas-Tar5edc8812019-11-05 18:00:21 +0000587
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100588 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
Keith Davis5204aa82020-01-27 15:24:59 +0000589 "Reference Convolution2d: biases is not a supported type.");
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100590 }
Jan Eilers8eb25602020-03-09 12:13:48 +0000591 IgnoreUnused(descriptor);
Mike Kelly2f80f6e2019-05-16 12:41:34 +0100592
593 return supported;
arovir011c7c81b2018-10-08 11:34:28 +0100594}
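// A usage sketch (not part of this file): callers such as backend unit tests typically
// build TensorInfos and query support as below; the shapes, quantization parameters and
// the use of EmptyOptional for the bias are illustrative assumptions only.
//
//     armnn::TensorInfo inputInfo ({1, 16, 16, 8}, armnn::DataType::QAsymmU8, 0.5f, 0);
//     armnn::TensorInfo outputInfo({1, 16, 16, 8}, armnn::DataType::QAsymmU8, 0.5f, 0);
//     armnn::TensorInfo weightInfo({8, 3, 3, 8},   armnn::DataType::QAsymmU8, 0.5f, 0);
//     armnn::Convolution2dDescriptor desc;
//     std::string reason;
//     armnn::RefLayerSupport support;
//     bool ok = support.IsConvolution2dSupported(inputInfo, outputInfo, desc, weightInfo,
//                                                armnn::EmptyOptional(),
//                                                armnn::Optional<std::string&>(reason));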

bool RefLayerSupport::IsConvolution3dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution3dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Convolution3d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution3d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }

    return supported;
}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedInputTypes = {
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,3> supportedOutputTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,6> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

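// Dilated depthwise convolution applies exactly the same data-type rules as the regular
// depthwise query, so the call is forwarded unchanged.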
bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    std::array<DataType, 1> logicalSupportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;

    if (descriptor.m_Operation == UnaryOperation::LogicalNot)
    {
        supported &= CheckSupportRule(TypeAnyOf(input, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFillSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      const FillDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(output);

    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeIs(input, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Fill: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fill: output type not supported.");
    return supported;
}

bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(output);
    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: output type not supported.");

    return supported;
}

bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const TensorInfo& weights,
                                                const TensorInfo& biases,
                                                const FullyConnectedDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: output type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights type not supported.");

    // For FullyConnected, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Fully Connected: input and output types mismatched.");
    }

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                  "Reference Fully Connected: input and weights types mismatched.");

    if (descriptor.m_BiasEnabled)
    {
        // Define supported types for bias.
        std::array<DataType, 5> supportedBiasTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32,
            DataType::QAsymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type not supported.");

        supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: bias and weight types mismatch.");

        supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type inferred from weights is incompatible.");

        supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(biases, 1U), reasonIfUnsupported,
                                      "Reference Fully Connected: bias must have 1 dimension.");
    }

    return supported;
}

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        const GatherDescriptor& descriptor,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    if (descriptor.m_Axis != 0)
    {
        reasonIfUnsupported.value() += std::string("Reference Gather: axis not supported\n");
        supported &= false;
    }
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 const L2NormalizationDescriptor& descriptor,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsLogicalBinarySupported(const TensorInfo& input0,
                                               const TensorInfo& input1,
                                               const TensorInfo& output,
                                               const LogicalBinaryDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 1> supportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 0 type not supported");
    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 1 type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference LogicalBinary: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const LogSoftmaxDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 3> supportedTypes =
    {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001256 DataType::BFloat16,
1257 DataType::Float32,
1258 DataType::Float16
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001259 };
1260
1261 bool supported = true;
1262 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1263 "Reference LogSoftmax: input type not supported");
1264
1265 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1266 "Reference LogSoftmax: output type not supported");
1267
1268 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1269 "Reference LogSoftmax: input and output types do not match");
1270
1271 return supported;
1272}
1273
arovir011c7c81b2018-10-08 11:34:28 +01001274bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
1275 const TensorInfo& outputStateIn,
1276 const TensorInfo& cellStateIn,
1277 const TensorInfo& scratchBuffer,
1278 const TensorInfo& outputStateOut,
1279 const TensorInfo& cellStateOut,
1280 const TensorInfo& output,
1281 const LstmDescriptor& descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001282 const LstmInputParamsInfo& paramsInfo,
1283 Optional<std::string&> reasonIfUnsupported) const
arovir011c7c81b2018-10-08 11:34:28 +01001284{
Jan Eilers8eb25602020-03-09 12:13:48 +00001285 IgnoreUnused(descriptor);
1286 IgnoreUnused(paramsInfo);
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001287
1288 bool supported = true;
1289
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001290 std::array<DataType,3> supportedTypes = {
1291 DataType::BFloat16,
Conor Kennedyb9971c92019-05-07 07:14:23 +01001292 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001293 DataType::QSymmS16
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001294 };
1295
Jan Eilersd01a83c2019-07-03 18:20:40 +01001296 // check inputs and outputs
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001297 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1298 "Reference Lstm: input is not a supported type.");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001299 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1300 "Reference Lstm: input and outputStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001301 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1302 "Reference Lstm: input and cellStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001303 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1304 "Reference Lstm: input and scratchBuffer types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001305 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1306 "Reference Lstm: input and outputStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001307 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1308 "Reference Lstm: input and cellStateOut types are mismatched");
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01001309
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001310 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1311 "Reference Lstm: input and output types are mismatched");
Jan Eilersd01a83c2019-07-03 18:20:40 +01001312 // check layer parameters
Francis Murtaghbb590b42019-08-14 09:51:36 +01001313 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001314 "Reference Lstm: input and InputToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001315 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001316 "Reference Lstm: input and InputToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001317 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001318 "Reference Lstm: input and InputToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001319 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001320 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001321 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001322 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001323 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001324 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001325 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001326 "Reference Lstm: input and ForgetGateBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001327 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001328 "Reference Lstm: input and CellBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001329 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001330 "Reference Lstm: input and OutputGateBias types are mismatched");
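    // The optional parameter sets below are only validated when the matching descriptor
    // flag enables them: CIFG disabled, peephole, projection, and layer normalisation.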
1331 if (!descriptor.m_CifgEnabled)
1332 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001333 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001334 "Reference Lstm: input and InputToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001335 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001336 reasonIfUnsupported,
1337 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001338 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001339 "Reference Lstm: input and InputGateBias types are mismatched");
1340 if (descriptor.m_PeepholeEnabled)
1341 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001342 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001343 reasonIfUnsupported,
1344 "Reference Lstm: input and CellToInputWeights types are mismatched");
1345 }
1346 }
1347 if (descriptor.m_PeepholeEnabled)
1348 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001349 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001350 "Reference Lstm: input and CellToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001351 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001352 "Reference Lstm: input and CellToOutputWeights types are mismatched");
1353 }
1354 if (descriptor.m_ProjectionEnabled)
1355 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001356 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001357 "Reference Lstm: input and mProjectionWeights types are mismatched");
1358 if (paramsInfo.m_ProjectionBias != nullptr)
1359 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001360 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001361 "Reference Lstm: input and ProjectionBias types are mismatched");
1362 }
1363 }
1364 if (descriptor.m_LayerNormEnabled)
1365 {
1366 if (!descriptor.m_CifgEnabled)
1367 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001368 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001369 reasonIfUnsupported,
1370 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1371 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001372 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001373 reasonIfUnsupported,
1374 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001375 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001376 reasonIfUnsupported,
1377 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001378 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001379 reasonIfUnsupported,
1380 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1381 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001382
1383 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001384}
1385
saoste012df12b32018-11-28 16:57:20 +00001386bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1387 const TensorInfo& input1,
1388 const TensorInfo& output,
1389 Optional<std::string&> reasonIfUnsupported) const
1390{
Sadik Armagan2999a022019-04-09 14:20:12 +01001391 bool supported = true;
1392
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001393 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001394 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001395 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001396 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001397 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001398 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001399 DataType::QSymmS16,
1400 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001401 };
1402
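    // Besides sharing one of the supported types, the two inputs only need shapes that are
    // broadcast-compatible with the output; the ShapesAreBroadcastCompatible rule below covers
    // the implicit broadcast case (e.g. a [1,1,2] input against a [1,2,2] input, assuming
    // NumPy-style broadcasting rules).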
1403 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1404 "Reference maximum: input 0 is not a supported type.");
1405
1406 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1407 "Reference maximum: input 1 is not a supported type.");
1408
1409 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1410 "Reference maximum: output is not a supported type.");
1411
1412 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1413 "Reference maximum: input 0 and Input 1 types are mismatched");
1414
1415 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1416 "Reference maximum: input and output types are mismatched");
1417
1418 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1419 "Reference maximum: shapes are not suitable for implicit broadcast.");
1420
1421 return supported;
saoste012df12b32018-11-28 16:57:20 +00001422}
1423
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001424bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1425 const TensorInfo& output,
1426 const MeanDescriptor& descriptor,
1427 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001428{
James Conroy4d1ff582019-06-10 17:06:39 +01001429 bool supported = true;
1430 std::string meanLayerStr = "Mean";
1431 std::string outputTensorStr = "output";
1432
Sadik Armagan303980c2020-04-17 12:45:14 +01001433 std::array<DataType,6> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001434 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001435 DataType::BFloat16,
James Conroy4d1ff582019-06-10 17:06:39 +01001436 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001437 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001438 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001439 DataType::QAsymmU8,
1440 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001441 };
1442
1443 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1444 "Reference Mean: input type not supported.");
1445
1446 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1447 "Reference Mean: input and output types are mismatched");
1448
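    // Worked example of the rank rules below (illustrative values, not from the source):
    // for a 4D input with m_Axis = {1, 2}, m_KeepDims == false requires a 2D output,
    // m_KeepDims == true requires the output to stay 4D, and an empty m_Axis with
    // m_KeepDims == false reduces everything down to a 1D output.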
1449 if (descriptor.m_KeepDims)
1450 {
1451 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1452 reasonIfUnsupported,
1453 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1454 output.GetNumDimensions(),
1455 meanLayerStr, outputTensorStr).data());
1456 }
1457 else if (descriptor.m_Axis.empty())
1458 {
1459 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1460 reasonIfUnsupported,
1461 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1462 meanLayerStr, outputTensorStr).data());
1463 }
1464 else
1465 {
Matthew Sloyan171214c2020-09-09 09:07:37 +01001466 auto outputDim = input.GetNumDimensions() - armnn::numeric_cast<unsigned int>(descriptor.m_Axis.size());
James Conroy4d1ff582019-06-10 17:06:39 +01001467
1468 if (outputDim > 0)
1469 {
1470 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1471 reasonIfUnsupported,
1472 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1473 meanLayerStr, outputTensorStr).data());
1474 }
1475 else
1476 {
1477 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1478 reasonIfUnsupported,
1479 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1480 meanLayerStr, outputTensorStr).data());
1481 }
1482 }
1483
1484 return supported;
narpra0132b90462018-09-13 11:07:48 +01001485}
1486
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001487bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1488 const TensorInfo &output,
1489 Optional<std::string &> reasonIfUnsupported) const
1490{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001491 bool supported = true;
1492
Sadik Armagan303980c2020-04-17 12:45:14 +01001493 std::array<DataType,7> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001494 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001495 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001496 DataType::Float32,
1497 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001498 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001499 DataType::QAsymmU8,
1500 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001501 DataType::Boolean
1502 };
1503
1504 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1505 "Reference MemCopy: input type not supported");
1506
1507 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1508 "Reference MemCopy: output type not supported");
1509
1510 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1511 "Reference MemCopy: input and output types are mismatched");
1512
1513 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001514}
1515
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001516bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1517 const TensorInfo& input1,
1518 const TensorInfo& output,
1519 Optional<std::string&> reasonIfUnsupported) const
1520{
Sadik Armagan2999a022019-04-09 14:20:12 +01001521 bool supported = true;
1522
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001523 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001524 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001525 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001526 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001527 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001528 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001529 DataType::QSymmS16,
1530 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001531 };
1532
1533 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1534 "Reference minimum: input 0 is not a supported type.");
1535
1536 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1537 "Reference minimum: input 1 is not a supported type.");
1538
1539 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1540 "Reference minimum: output is not a supported type.");
1541
1542 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1543 "Reference minimum: input 0 and Input 1 types are mismatched");
1544
1545 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1546 "Reference minimum: input and output types are mismatched");
1547
1548 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1549 "Reference minimum: shapes are not suitable for implicit broadcast.");
1550
1551 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001552}
1553
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001554bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1555 const TensorInfo& input1,
1556 const TensorInfo& output,
1557 Optional<std::string&> reasonIfUnsupported) const
1558{
Sadik Armagan2999a022019-04-09 14:20:12 +01001559 bool supported = true;
1560
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001561 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001562 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001563 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001564 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001565 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001566 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001567 DataType::QSymmS16,
1568 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001569 };
1570
1571 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1572 "Reference multiplication: input 0 is not a supported type.");
1573
1574 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1575 "Reference multiplication: input 1 is not a supported type.");
1576
1577 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1578 "Reference multiplication: output is not a supported type.");
1579
1580 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1581 "Reference multiplication: input 0 and Input 1 types are mismatched");
1582
1583 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1584 "Reference multiplication: input and output types are mismatched");
1585
1586 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1587 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1588
1589 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001590}
1591
1592bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1593 const TensorInfo& output,
1594 const NormalizationDescriptor& descriptor,
1595 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001596{
Jan Eilers8eb25602020-03-09 12:13:48 +00001597 IgnoreUnused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001598
1599 // Define supported types
Sadik Armagan303980c2020-04-17 12:45:14 +01001600 std::array<DataType, 6> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001601 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001602 DataType::BFloat16,
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001603 DataType::Float16,
1604 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001605 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001606 DataType::QAsymmU8,
1607 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001608 };
1609
1610 bool supported = true;
1611
1612 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1613 "Reference normalization: input type not supported.");
1614
1615 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1616 "Reference normalization: output type not supported.");
1617
1618 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1619 "Reference normalization: input and output shapes have different "
1620 "num total elements.");
1621
1622 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001623}
1624
Derek Lamberti901ea112019-12-10 22:07:09 +00001625bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1626 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001627{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001628 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001629}
1630
1631bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1632 const TensorInfo& output,
1633 const PadDescriptor& descriptor,
1634 Optional<std::string&> reasonIfUnsupported) const
1635{
Jan Eilers8eb25602020-03-09 12:13:48 +00001636 IgnoreUnused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001637 bool supported = true;
1638
1639 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001640 std::array<DataType,6> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001641 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001642 DataType::BFloat16,
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001643 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001644 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001645 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001646 DataType::QAsymmU8,
1647 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001648 };
1649
1650 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1651 "Reference pad: input is not a supported type.");
1652
1653 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1654 "Reference pad: output is not a supported type.");
1655
1656 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1657 "Reference pad: input and output types are mismatched.");
1658
1659 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001660}
1661
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001662bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1663 const TensorInfo& output,
1664 const PermuteDescriptor& descriptor,
1665 Optional<std::string&> reasonIfUnsupported) const
1666{
Jan Eilers8eb25602020-03-09 12:13:48 +00001667 IgnoreUnused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001668 bool supported = true;
1669
 1670 // Define supported output and input types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001671 std::array<DataType, 6> supportedTypes =
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001672 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001673 DataType::BFloat16,
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001674 DataType::Float32,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001675 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001676 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001677 DataType::QAsymmU8,
1678 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001679 };
1680
1681 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1682 "Reference permute: input is not a supported type.");
1683
1684 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1685 "Reference permute: output is not a supported type.");
1686
1687 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1688 "Reference permute: input and output types are mismatched.");
1689
1690 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001691}
1692
1693bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1694 const TensorInfo& output,
1695 const Pooling2dDescriptor& descriptor,
1696 Optional<std::string&> reasonIfUnsupported) const
1697{
Jan Eilers8eb25602020-03-09 12:13:48 +00001698 IgnoreUnused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001699 bool supported = true;
1700
 1701 // Define supported output and input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001702 std::array<DataType,6> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001703 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001704 DataType::BFloat16,
Teresa Charlina3b20472019-06-06 11:12:32 +01001705 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001706 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001707 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001708 DataType::QAsymmU8,
1709 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001710 };
1711
1712 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1713 "Reference poolind2d: input is not a supported type.");
1714
1715 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1716 "Reference poolind2d: output is not a supported type.");
1717
1718 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1719 "Reference poolind2d: input and output types are mismatched.");
1720
1721 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001722}
1723
Tamás Nyíri7b885b32021-10-26 14:47:57 +01001724bool RefLayerSupport::IsPooling3dSupported(const TensorInfo& input,
1725 const TensorInfo& output,
1726 const Pooling3dDescriptor& descriptor,
1727 Optional<std::string&> reasonIfUnsupported) const
1728{
1729 IgnoreUnused(descriptor);
1730 bool supported = true;
1731
 1732 // Define supported output and input types.
1733 std::array<DataType,6> supportedTypes =
1734 {
1735 DataType::BFloat16,
1736 DataType::Float32,
1737 DataType::Float16,
1738 DataType::QAsymmS8,
1739 DataType::QAsymmU8,
1740 DataType::QSymmS16
1741 };
1742
1743 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1744 "Reference poolind3d: input is not a supported type.");
1745
1746 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1747 "Reference poolind3d: output is not a supported type.");
1748
1749 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1750 "Reference poolind3d: input and output types are mismatched.");
1751
1752 return supported;
1753}
1754
1755
James Conroy4f1f8992020-04-29 20:01:10 +01001756bool RefLayerSupport::IsQLstmSupported(const TensorInfo& input,
1757 const TensorInfo& previousOutputIn,
1758 const TensorInfo& previousCellStateIn,
1759 const TensorInfo& outputStateOut,
1760 const TensorInfo& cellStateOut,
1761 const TensorInfo& output,
1762 const QLstmDescriptor& descriptor,
1763 const LstmInputParamsInfo& paramsInfo,
1764 Optional<std::string&> reasonIfUnsupported) const
1765{
1766 IgnoreUnused(input);
1767 IgnoreUnused(previousOutputIn);
1768 IgnoreUnused(previousCellStateIn);
1769 IgnoreUnused(outputStateOut);
1770 IgnoreUnused(cellStateOut);
1771 IgnoreUnused(output);
1772 IgnoreUnused(descriptor);
1773 IgnoreUnused(paramsInfo);
1774
1775 IgnoreUnused(reasonIfUnsupported);
1776
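    // Note: no type or shape rules are applied here; every QLSTM configuration is
    // reported as supported by the reference backend (see the unconditional return).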
1777 return true;
1778}
1779
Derek Lamberti5f400d62019-03-25 15:41:58 +00001780bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1781 const TensorInfo& output,
1782 Optional<std::string&> reasonIfUnsupported) const
1783{
1784 bool supported = true;
1785
Finn Williamsfd271062019-12-04 14:27:27 +00001786 // Define supported input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001787 std::array<DataType,7> supportedInputTypes = {
1788 DataType::BFloat16,
Keith Davis5e51cd82020-01-29 16:52:59 +00001789 DataType::Float32,
Keith Davis3d8bc972020-02-04 09:31:47 +00001790 DataType::Float16,
Ryan OShea9add1202020-02-07 10:06:33 +00001791 DataType::QAsymmS8,
Keith Davis5e51cd82020-01-29 16:52:59 +00001792 DataType::QAsymmU8,
1793 DataType::QSymmS8,
1794 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001795 };
1796
1797 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1798 "Reference quantize: input type not supported.");
1799
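    // Quantize maps any supported input type to a quantized output; input and output types
    // are not required to match here, only the total element counts (checked last).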
1800 // Define supported output types.
Ryan OShea9add1202020-02-07 10:06:33 +00001801 std::array<DataType,4> supportedOutputTypes = {
Ryan OShea9add1202020-02-07 10:06:33 +00001802 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001803 DataType::QAsymmU8,
Finn Williamsfd271062019-12-04 14:27:27 +00001804 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001805 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001806 };
1807 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1808 "Reference quantize: output type not supported.");
1809
1810 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1811 "Reference quantize: input and output shapes have different num total elements.");
1812
1813 return supported;
1814}
1815
Finn Williams2605b232020-06-10 15:53:46 +01001816bool RefLayerSupport::IsRankSupported(const TensorInfo& input,
1817 const TensorInfo& output,
1818 Optional<std::string&> reasonIfUnsupported) const
1819{
1820 IgnoreUnused(input);
1821 // Define supported output types.
1822 std::array<DataType,1> supportedOutputTypes =
1823 {
1824 DataType::Signed32,
1825 };
1826
1827 return CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1828 "Reference rank: input type not supported.");
1829}
1830
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001831bool RefLayerSupport::IsReduceSupported(const TensorInfo& input,
1832 const TensorInfo& output,
1833 const ReduceDescriptor& descriptor,
1834 Optional<std::string&> reasonIfUnsupported) const
1835{
1836 IgnoreUnused(descriptor);
1837 bool supported = true;
1838 std::array<DataType,7> supportedTypes =
1839 {
1840 DataType::BFloat16,
1841 DataType::Float32,
1842 DataType::Float16,
1843 DataType::QAsymmS8,
1844 DataType::QAsymmU8,
1845 DataType::QSymmS16,
1846 DataType::Signed32
1847 };
1848
1849 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1850 "Reference Reduce: input type not supported");
1851
1852 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1853 "Reference Reduce: output type not supported");
1854
1855 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1856 "Reference Reduce: input and output types not matching");
1857
1858 return supported;
1859}
1860
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001861bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001862 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001863 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001864 Optional<std::string&> reasonIfUnsupported) const
1865{
Jan Eilers8eb25602020-03-09 12:13:48 +00001866 IgnoreUnused(output);
1867 IgnoreUnused(descriptor);
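    // Reshape only re-interprets the tensor shape, so neither the output info nor the
    // descriptor is validated here; only the input type has to be one of the supported types.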
Nina Drozd2f2778f2019-05-27 10:37:05 +01001868 // Define supported input types.
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001869 std::array<DataType,8> supportedTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001870 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001871 DataType::BFloat16,
Nina Drozd2f2778f2019-05-27 10:37:05 +01001872 DataType::Float32,
1873 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001874 DataType::Signed32,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001875 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001876 DataType::QAsymmU8,
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001877 DataType::QSymmS16,
1878 DataType::Boolean
Nina Drozd2f2778f2019-05-27 10:37:05 +01001879 };
Keith Davis0c2eeac2020-02-11 16:51:50 +00001880
Nina Drozd2f2778f2019-05-27 10:37:05 +01001881 return CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1882 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001883}
1884
Teresa Charlin970f43b2019-07-01 13:51:07 +01001885bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1886 const TensorInfo& output,
1887 const ResizeDescriptor& descriptor,
1888 Optional<std::string&> reasonIfUnsupported) const
1889{
Jan Eilers8eb25602020-03-09 12:13:48 +00001890 IgnoreUnused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001891 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001892 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001893 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001894 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001895 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001896 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001897 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001898 DataType::QAsymmU8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001899 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001900 };
1901
1902 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1903 "Reference Resize: input type not supported");
1904
1905 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1906 "Reference Resize: output type not supported");
1907
1908 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1909 "Reference Resize: input and output types not matching");
1910
1911 return supported;
1912}
1913
Keith Davis3ae3f972021-05-21 16:33:48 +01001914bool RefLayerSupport::IsShapeSupported(const TensorInfo& input,
1915 const TensorInfo& output,
1916 Optional<std::string&> reasonIfUnsupported) const
1917{
1918 IgnoreUnused(input);
1919 bool supported = true;
1920
1921 std::array<DataType, 1> supportedTypes =
1922 {
1923 DataType::Signed32
1924 };
1925
1926 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1927 "Reference Shape: output type not supported");
1928
1929 return supported;
1930}
1931
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001932bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1933 const TensorInfo& output,
1934 const SliceDescriptor& descriptor,
1935 Optional<std::string&> reasonIfUnsupported) const
1936{
Jan Eilers8eb25602020-03-09 12:13:48 +00001937 IgnoreUnused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001938 bool supported = true;
1939
Sadik Armagan303980c2020-04-17 12:45:14 +01001940 std::array<DataType, 5> supportedTypes =
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001941 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001942 DataType::BFloat16,
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001943 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001944 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001945 DataType::QAsymmU8,
1946 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001947 };
1948
1949 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1950 "Reference Slice: input type not supported");
1951
1952 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1953 "Reference Slice: output type not supported");
1954
1955 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1956 "Reference Slice: input and output types are mismatched");
1957
1958 return supported;
1959}
1960
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001961bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1962 const TensorInfo& output,
1963 const SoftmaxDescriptor& descriptor,
1964 Optional<std::string&> reasonIfUnsupported) const
1965{
Jan Eilers8eb25602020-03-09 12:13:48 +00001966 IgnoreUnused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01001967 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001968 std::array<DataType,7> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01001969 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001970 DataType::BFloat16,
1971 DataType::Float32,
1972 DataType::Float16,
1973 DataType::QSymmS8,
1974 DataType::QAsymmS8,
1975 DataType::QAsymmU8,
1976 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01001977 };
1978
1979 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001980 "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001981
1982 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001983 "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001984
1985 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001986 "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01001987
1988 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001989}
1990
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001991bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1992 const TensorInfo& output,
1993 const SpaceToBatchNdDescriptor& descriptor,
1994 Optional<std::string&> reasonIfUnsupported) const
1995{
Jan Eilers8eb25602020-03-09 12:13:48 +00001996 IgnoreUnused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01001997 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001998 std::array<DataType,6> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01001999 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002000 DataType::BFloat16,
2001 DataType::Float32,
2002 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002003 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002004 DataType::QAsymmU8,
2005 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01002006 };
2007
2008 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2009 "Reference SpaceToBatchNd: input type not supported");
2010
2011 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2012 "Reference SpaceToBatchNd: output type not supported");
2013
2014 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2015 "Reference SpaceToBatchNd: input and output types are mismatched");
2016
2017 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00002018}
2019
Keith Davisa57eccb2019-06-14 17:33:22 +01002020bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01002021 const TensorInfo& output,
2022 const SpaceToDepthDescriptor& descriptor,
2023 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01002024{
2025
Jan Eilers8eb25602020-03-09 12:13:48 +00002026 IgnoreUnused(descriptor);
Keith Davisa57eccb2019-06-14 17:33:22 +01002027 bool supported = true;
2028
Sadik Armagan303980c2020-04-17 12:45:14 +01002029 std::array<DataType,6> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01002030 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002031 DataType::BFloat16,
Keith Davisa57eccb2019-06-14 17:33:22 +01002032 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002033 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002034 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002035 DataType::QAsymmU8,
2036 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01002037 };
2038
2039 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2040 "Reference SpaceToDepth: input type not supported");
2041
2042 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2043 "Reference SpaceToDepth: output type not supported");
2044
2045 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2046 "Reference SpaceToDepth: input and output types are mismatched");
2047
2048 return supported;
2049}
2050
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002051bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002052 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
2053 const ViewsDescriptor& descriptor,
2054 Optional<std::string&> reasonIfUnsupported) const
2055{
Jan Eilers8eb25602020-03-09 12:13:48 +00002056 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002057 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002058 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002059 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002060 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002061 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002062 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002063 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002064 DataType::QAsymmU8,
2065 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002066 };
2067
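    // The input type is checked once, then every output view must use a supported type
    // and match the input type exactly.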
2068 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2069 "Reference splitter: output type not supported");
Derek Lambertieac4adb2020-08-25 13:05:59 +01002070 for (const TensorInfo& output : outputs)
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002071 {
2072 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2073 "Reference splitter: input type not supported");
2074
2075 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2076 "Reference splitter: input and output types mismatched.");
2077 }
2078
2079 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002080}
2081
Matthew Jackson81e601c2019-07-11 12:07:09 +01002082bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
2083 const TensorInfo& output,
2084 const StackDescriptor& descriptor,
2085 Optional<std::string&> reasonIfUnsupported) const
2086{
Jan Eilers8eb25602020-03-09 12:13:48 +00002087 IgnoreUnused(descriptor);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002088
2089 bool supported = true;
Sadik Armagan529195f2022-01-14 12:56:35 +00002090 std::array<DataType,7> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01002091 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002092 DataType::BFloat16,
Matthew Jackson81e601c2019-07-11 12:07:09 +01002093 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01002094 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002095 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002096 DataType::QAsymmU8,
Sadik Armagan529195f2022-01-14 12:56:35 +00002097 DataType::QSymmS16,
2098 DataType::Signed32
Matthew Jackson81e601c2019-07-11 12:07:09 +01002099 };
2100
2101 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2102 "Reference stack: output type not supported");
2103 for (const TensorInfo* input : inputs)
2104 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002105 ARMNN_ASSERT(input != nullptr);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002106 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
2107 "Reference stack: input type not supported");
2108
2109 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
2110 "Reference stack: input and output types mismatched.");
2111 }
2112
2113 return supported;
2114}
2115
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002116bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
2117 const TensorInfo& output,
2118 const StridedSliceDescriptor& descriptor,
2119 Optional<std::string&> reasonIfUnsupported) const
2120{
Jan Eilers8eb25602020-03-09 12:13:48 +00002121 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002122 bool supported = true;
2123
Sadik Armagan303980c2020-04-17 12:45:14 +01002124 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002125 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002126 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002127 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01002128 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002129 DataType::QAsymmU8,
2130 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002131 };
2132
2133 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2134 "Reference StridedSlice: input type not supported");
2135
2136 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2137 "Reference StridedSlice: output type not supported");
2138
2139 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2140 "Reference StridedSlice: input and output types are mismatched");
2141
2142 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002143}
2144
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002145bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
2146 const TensorInfo& input1,
2147 const TensorInfo& output,
2148 Optional<std::string&> reasonIfUnsupported) const
2149{
Sadik Armagan2999a022019-04-09 14:20:12 +01002150 bool supported = true;
2151
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002152 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002153 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01002154 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002155 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002156 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002157 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002158 DataType::QSymmS16,
2159 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01002160 };
2161
2162 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
2163 "Reference subtraction: input 0 is not a supported type.");
2164
2165 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
2166 "Reference subtraction: input 1 is not a supported type.");
2167
2168 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2169 "Reference subtraction: output is not a supported type.");
2170
2171 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
2172 "Reference subtraction: input 0 and Input 1 types are mismatched");
2173
2174 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
2175 "Reference subtraction: input and output types are mismatched");
2176
2177 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
2178 "Reference subtraction: shapes are not suitable for implicit broadcast.");
2179
2180 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002181}
2182
Matteo Martincighab9e5252019-06-13 17:27:46 +01002183bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
2184 const TensorInfo& alpha,
2185 const TensorInfo& output,
2186 Optional<std::string&> reasonIfUnsupported) const
2187{
2188 bool supported = true;
2189
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002190 std::array<DataType, 6> supportedTypes
Matteo Martincighab9e5252019-06-13 17:27:46 +01002191 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002192 DataType::BFloat16,
Matteo Martincighab9e5252019-06-13 17:27:46 +01002193 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002194 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002195 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002196 DataType::QAsymmU8,
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002197 DataType::QSymmS16
Matteo Martincighab9e5252019-06-13 17:27:46 +01002198 };
2199
2200 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2201 "PReLU: input is not a supported type.");
2202
2203 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
2204 "PReLU: alpha is not a supported type.");
2205
2206 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2207 "PReLU: output is not a supported type.");
2208
2209 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
2210 "PReLU: input, alpha and output types are mismatched");
2211
2212 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
2213 "PReLU: shapes are not suitable for implicit broadcast");
2214
2215 return supported;
2216}
2217
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002218bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
2219 const TensorInfo& output,
2220 const TransposeConvolution2dDescriptor& descriptor,
2221 const TensorInfo& weights,
2222 const Optional<TensorInfo>& biases,
2223 Optional<std::string&> reasonIfUnsupported) const
2224{
Jan Eilers8eb25602020-03-09 12:13:48 +00002225 IgnoreUnused(descriptor);
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002226 bool supported = true;
2227
Sadik Armagan303980c2020-04-17 12:45:14 +01002228 std::array<DataType,7> supportedTypes =
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002229 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002230 DataType::BFloat16,
2231 DataType::Float32,
2232 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002233 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002234 DataType::QAsymmU8,
Sadik Armagan303980c2020-04-17 12:45:14 +01002235 DataType::QSymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002236 DataType::QSymmS16
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002237 };
2238
2239 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2240 "Reference TransposeConvolution2d: input is not a supported type.");
2241
2242 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2243 "Reference TransposeConvolution2d: output is not a supported type.");
2244
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002245 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2246 "Reference TransposeConvolution2d: input and output types mismatched.");
2247
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002248
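    // For 8-bit quantized inputs, the weights must use one of the quantized weight types
    // listed below; for float inputs, the weights must simply match the input type.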
2249 const DataType inputType = input.GetDataType();
Sadik Armagan303980c2020-04-17 12:45:14 +01002250 if (IsQuantized8BitType(inputType))
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002251 {
Jan Eilers1b2654f2021-09-24 15:45:46 +01002252 std::array<DataType, 3> supportedWeightTypes =
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002253 {
Sadik Armagan303980c2020-04-17 12:45:14 +01002254 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002255 DataType::QAsymmU8,
Jan Eilers1b2654f2021-09-24 15:45:46 +01002256 DataType::QSymmS8
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002257 };
2258
2259 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
2260 "Reference TransposeConvolution2d: weights type not supported for "
2261 "quantized input.");
2262 }
2263 else
2264 {
2265 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
2266 "Reference TransposeConvolution2d: weights is not a supported type.");
2267
2268 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
2269 "Reference TransposeConvolution2d: input and weights types mismatched.");
2270 }
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002271
2272 if (biases.has_value())
2273 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002274 std::array<DataType,4> biasesSupportedTypes =
Aron Virginas-Tar651aafe2019-08-05 11:52:05 +01002275 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002276 DataType::BFloat16,
2277 DataType::Float32,
2278 DataType::Float16,
2279 DataType::Signed32
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002280 };
2281 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
2282 "Reference TransposeConvolution2d: biases is not a supported type.");
2283 }
2284
2285 return supported;
2286}
2287
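// Illustrative usage sketch (not part of the upstream file): checking transpose convolution
// support for a quantized network. The shapes, strides and quantization parameters below are
// hypothetical. With an 8-bit quantized input the weights may be QAsymmS8, QAsymmU8 or
// QSymmS8; for all other inputs the weights must match the input type.
//
//     armnn::RefLayerSupport layerSupport;
//     armnn::TransposeConvolution2dDescriptor desc;
//     desc.m_StrideX     = 2;
//     desc.m_StrideY     = 2;
//     desc.m_BiasEnabled = true;
//     desc.m_DataLayout  = armnn::DataLayout::NHWC;
//
//     armnn::TensorInfo input  ({ 1, 8, 8, 16 },   armnn::DataType::QAsymmU8, 0.5f,   0);
//     armnn::TensorInfo output ({ 1, 16, 16, 16 }, armnn::DataType::QAsymmU8, 0.5f,   0);
//     armnn::TensorInfo weights({ 16, 3, 3, 16 },  armnn::DataType::QSymmS8,  0.25f,  0);
//     armnn::TensorInfo biases ({ 16 },            armnn::DataType::Signed32, 0.125f, 0);
//
//     std::string reason;
//     bool ok = layerSupport.IsTransposeConvolution2dSupported(
//         input, output, desc, weights, armnn::Optional<armnn::TensorInfo>(biases),
//         armnn::Optional<std::string&>(reason));
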
bool RefLayerSupport::IsTransposeSupported(const TensorInfo& input,
                                           const TensorInfo& output,
                                           const TransposeDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported input and output types.
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference transpose: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference transpose: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference transpose: input and output types are mismatched.");

    return supported;
}

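// Illustrative usage sketch (not part of the upstream file): checking transpose support for
// an NCHW -> NHWC permutation. The shapes and permutation are hypothetical; note that only
// the data types are validated by the checks above, not the permutation or the shapes.
//
//     armnn::RefLayerSupport layerSupport;
//     armnn::TransposeDescriptor desc(armnn::PermutationVector({ 0, 2, 3, 1 }));
//     armnn::TensorInfo input ({ 1, 3, 32, 32 }, armnn::DataType::Float32);
//     armnn::TensorInfo output({ 1, 32, 32, 3 }, armnn::DataType::Float32);
//     std::string reason;
//     bool ok = layerSupport.IsTransposeSupported(input, output, desc,
//                                                 armnn::Optional<std::string&>(reason));
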
bool RefLayerSupport::IsUnidirectionalSequenceLstmSupported(
    const TensorInfo& input,
    const TensorInfo& outputStateIn,
    const TensorInfo& cellStateIn,
    const TensorInfo& output,
    const Optional<TensorInfo>& hiddenStateOutput,
    const Optional<TensorInfo>& cellStateOutput,
    const UnidirectionalSequenceLstmDescriptor& descriptor,
    const LstmInputParamsInfo& paramsInfo,
    Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // The optional hidden state and cell state outputs are not implemented in the reference
    // backend yet, so requesting them makes the layer unsupported.
    if (hiddenStateOutput.has_value() || cellStateOutput.has_value())
    {
        if (reasonIfUnsupported.has_value())
        {
            reasonIfUnsupported.value() += "Reference UnidirectionalSequenceLstm: hidden state output "
                                           "and cell state output are not supported at the moment.";
        }
        supported = false;
    }

    std::array<DataType, 1> supportedTypes =
    {
        DataType::Float32
    };

    std::array<DataType, 2> supportedWeightTypes =
    {
        DataType::Float32,
        DataType::QAsymmS8
    };

    // check inputs and outputs
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input is not a supported type.");
    supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and outputStateIn types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and cellStateIn types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and output types are mismatched");
    // check layer parameters
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToForgetWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToForgetWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToCellWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToCellWeights is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToOutputWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: InputToOutputWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToForgetWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToForgetWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToCellWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToCellWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToOutputWeights(), supportedWeightTypes),
                                  reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: RecurrentToOutputWeights "
                                  "is not a supported type.");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and ForgetGateBias types "
                                  "are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and CellBias types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
                                  "Reference UnidirectionalSequenceLstm: input and OutputGateBias types "
                                  "are mismatched");
    if (!descriptor.m_CifgEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToInputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: InputToInputWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToInputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: RecurrentToInputWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: input and InputGateBias types "
                                      "are mismatched");
        if (descriptor.m_PeepholeEnabled)
        {
            supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToInputWeights(), supportedWeightTypes),
                                          reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: CellToInputWeights "
                                          "is not a supported type.");
        }
    }
    if (descriptor.m_PeepholeEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToForgetWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellToForgetWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToOutputWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellToOutputWeights "
                                      "is not a supported type.");
    }
    if (descriptor.m_ProjectionEnabled)
    {
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetProjectionWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: ProjectionWeights "
                                      "is not a supported type.");
        if (paramsInfo.m_ProjectionBias != nullptr)
        {
            supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: input and ProjectionBias types "
                                          "are mismatched");
        }
    }
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputLayerNormWeights(), supportedWeightTypes),
                                          reasonIfUnsupported,
                                          "Reference UnidirectionalSequenceLstm: InputLayerNormWeights "
                                          "is not a supported type.");
        }
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetForgetLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: ForgetLayerNormWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: CellLayerNormWeights "
                                      "is not a supported type.");
        supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetOutputLayerNormWeights(), supportedWeightTypes),
                                      reasonIfUnsupported,
                                      "Reference UnidirectionalSequenceLstm: OutputLayerNormWeights "
                                      "is not a supported type.");
    }

    return supported;
}

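// Illustrative usage sketch (not part of the upstream file): checking unidirectional sequence
// LSTM support with CIFG enabled, so the input-gate parameters can be omitted. All shapes are
// hypothetical and are not validated here; only the data types are. The reference backend
// accepts Float32 data with Float32 or QAsymmS8 weights.
//
//     armnn::RefLayerSupport layerSupport;
//     armnn::UnidirectionalSequenceLstmDescriptor desc;
//     desc.m_CifgEnabled = true;   // no input-gate weights or bias required
//
//     armnn::TensorInfo weightInfo({ 4, 2 }, armnn::DataType::Float32);
//     armnn::TensorInfo biasInfo  ({ 4 },    armnn::DataType::Float32);
//
//     armnn::LstmInputParamsInfo paramsInfo;
//     paramsInfo.m_InputToForgetWeights     = &weightInfo;
//     paramsInfo.m_InputToCellWeights       = &weightInfo;
//     paramsInfo.m_InputToOutputWeights     = &weightInfo;
//     paramsInfo.m_RecurrentToForgetWeights = &weightInfo;
//     paramsInfo.m_RecurrentToCellWeights   = &weightInfo;
//     paramsInfo.m_RecurrentToOutputWeights = &weightInfo;
//     paramsInfo.m_ForgetGateBias           = &biasInfo;
//     paramsInfo.m_CellBias                 = &biasInfo;
//     paramsInfo.m_OutputGateBias           = &biasInfo;
//
//     armnn::TensorInfo input      ({ 2, 3, 2 }, armnn::DataType::Float32);
//     armnn::TensorInfo outputState({ 3, 4 },    armnn::DataType::Float32);
//     armnn::TensorInfo cellState  ({ 3, 4 },    armnn::DataType::Float32);
//     armnn::TensorInfo output     ({ 2, 3, 4 }, armnn::DataType::Float32);
//
//     std::string reason;
//     bool ok = layerSupport.IsUnidirectionalSequenceLstmSupported(
//         input, outputState, cellState, output,
//         armnn::EmptyOptional(), armnn::EmptyOptional(),
//         desc, paramsInfo, armnn::Optional<std::string&>(reason));
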
} // namespace armnn