//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <LayerSupportCommon.hpp>
#include <backendsCommon/LayerSupportRules.hpp>

#include <vector>
#include <array>

namespace armnn
{

namespace
{

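// Helper that answers a support query by dispatching on the tensor DataType:
// the Float32 and Uint8 slots forward to the supplied function objects, while
// every other data type falls through to FalseFunc (i.e. "not supported").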
template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace

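// Each IsXSupported function below follows the same pattern: it accumulates the results of
// a series of CheckSupportRule calls (TypeAnyOf, TypesAreEqual, ShapesAreSameTotalSize, ...)
// from backendsCommon/LayerSupportRules.hpp. When a rule fails, CheckSupportRule appends the
// supplied message to reasonIfUnsupported (if one was provided) and the overall result
// becomes false.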
bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

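    // Rule subclasses evaluate to a boolean through m_Res; this one sets it in the
    // constructor according to whether the requested activation function has a
    // reference implementation.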
    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::Elu:
                case ActivationFunction::HardSwish:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}
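
// Example (hypothetical caller, not part of this backend) of how these predicates are
// typically queried, using IsActivationSupported above; inputInfo, outputInfo and
// activationDesc are placeholder names. The reason string is only filled in on failure.
//
//     std::string reason;
//     RefLayerSupport refSupport;
//     if (!refSupport.IsActivationSupported(inputInfo, outputInfo, activationDesc,
//                                           Optional<std::string&>(reason)))
//     {
//         std::cerr << "CpuRef cannot run this activation: " << reason;
//     }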

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo &input, const armnn::TensorInfo &output,
                                           const armnn::ArgMinMaxDescriptor &descriptor,
                                           armnn::Optional<std::string &> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32,
        DataType::Signed64
    };

    std::array<DataType,2> supportedOutputTypes = {
        DataType::Signed32,
        DataType::Signed64
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

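    // The reference BatchToSpaceNd workload only handles 4-D tensors, hence the explicit
    // dimension checks below.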
    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsCastSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

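    // The same list is used for the output: Cast may convert between any pair of the types
    // above, and only the total number of elements has to match.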
    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: input is not a supported type");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedInputTypes), reasonIfUnsupported,
                                  "Reference cast: output is not a supported type");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference cast: input and output shapes have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsChannelShuffleSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const ChannelShuffleDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported output and input types.
    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference ChannelShuffle: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference ChannelShuffle: input and output types are mismatched.");

    return supported;
}

bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::Boolean,
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        ARMNN_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertBf16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: output type not supported");

    return supported;
}

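// The Fp16 <-> Fp32 converters below still use the older IsSupportedForDataTypeGeneric
// dispatch (one function pointer per DataType) rather than the CheckSupportRule pattern:
// the input must be the source type and the output must be the destination type.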
bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvertFp32ToBf16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &FalseInputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseOutputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution2dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: output is not a supported type.");

    // For Convolution2d, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Convolution2d: input and output types mismatched.");
    }

    const DataType inputType = input.GetDataType();
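    // For quantized 8-bit inputs the weights must also be an 8-bit quantized type;
    // otherwise the weights must use the same type as the input.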
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsConvolution3dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution3dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution3d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Convolution3d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution3d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution3d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }

    return supported;
}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedInputTypes = {
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,3> supportedOutputTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,6> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    std::array<DataType, 1> logicalSupportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;

    if (descriptor.m_Operation == UnaryOperation::LogicalNot)
    {
        supported &= CheckSupportRule(TypeAnyOf(input, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, logicalSupportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: input type not supported");

        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                      "Reference elementwise unary: output type not supported");
    }

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFillSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      const FillDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(output);

    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeIs(input, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Fill: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fill: output type not supported.");
    return supported;
}

bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(output);
    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: output type not supported.");

    return supported;
}

bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const TensorInfo& weights,
                                                const TensorInfo& biases,
                                                const FullyConnectedDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: output type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights type not supported.");

    // For FullyConnected, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Fully Connected: input and output types mismatched.");
    }

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                  "Reference Fully Connected: input and weights types mismatched.");

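    // When a bias is enabled it must use a type that is compatible with the weights
    // (for example a Signed32 bias for quantized weights) and must be a 1-D tensor.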
    if (descriptor.m_BiasEnabled)
    {
        // Define supported types for bias.
        std::array<DataType, 5> supportedBiasTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32,
            DataType::QAsymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type not supported.");

        supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: bias and weight types mismatch.");

        supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type inferred from weights is incompatible.");

        supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(biases, 1U), reasonIfUnsupported,
                                      "Reference Fully Connected: bias must have 1 dimension.");
    }

    return supported;
}

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        const GatherDescriptor& descriptor,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

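    // The reference Gather workload only implements gathering along axis 0; any other axis is rejected.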
    if (descriptor.m_Axis != 0)
    {
        reasonIfUnsupported.value() += std::string("Reference Gather: axis not supported\n");
        supported &= false;
    }
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 const L2NormalizationDescriptor& descriptor,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsLogicalBinarySupported(const TensorInfo& input0,
                                               const TensorInfo& input1,
                                               const TensorInfo& output,
                                               const LogicalBinaryDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 1> supportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 0 type not supported");
    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 1 type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference LogicalBinary: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const LogSoftmaxDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 3> supportedTypes =
    {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001256 DataType::BFloat16,
1257 DataType::Float32,
1258 DataType::Float16
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001259 };
1260
1261 bool supported = true;
1262 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1263 "Reference LogSoftmax: input type not supported");
1264
1265 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1266 "Reference LogSoftmax: output type not supported");
1267
1268 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1269 "Reference LogSoftmax: input and output types do not match");
1270
1271 return supported;
1272}
1273
arovir011c7c81b2018-10-08 11:34:28 +01001274bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
1275 const TensorInfo& outputStateIn,
1276 const TensorInfo& cellStateIn,
1277 const TensorInfo& scratchBuffer,
1278 const TensorInfo& outputStateOut,
1279 const TensorInfo& cellStateOut,
1280 const TensorInfo& output,
1281 const LstmDescriptor& descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001282 const LstmInputParamsInfo& paramsInfo,
1283 Optional<std::string&> reasonIfUnsupported) const
arovir011c7c81b2018-10-08 11:34:28 +01001284{
Jan Eilers8eb25602020-03-09 12:13:48 +00001285 IgnoreUnused(descriptor);
1286 IgnoreUnused(paramsInfo);
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001287
1288 bool supported = true;
1289
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001290 std::array<DataType,3> supportedTypes = {
1291 DataType::BFloat16,
Conor Kennedyb9971c92019-05-07 07:14:23 +01001292 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001293 DataType::QSymmS16
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001294 };
1295
Jan Eilersd01a83c2019-07-03 18:20:40 +01001296 // check inputs and outputs
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001297 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1298 "Reference Lstm: input is not a supported type.");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001299 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1300 "Reference Lstm: input and outputStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001301 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1302 "Reference Lstm: input and cellStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001303 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1304 "Reference Lstm: input and scratchBuffer types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001305 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1306 "Reference Lstm: input and outputStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001307 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1308 "Reference Lstm: input and cellStateOut types are mismatched");
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01001309
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001310 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1311 "Reference Lstm: input and output types are mismatched");
Jan Eilersd01a83c2019-07-03 18:20:40 +01001312 // check layer parameters
Francis Murtaghbb590b42019-08-14 09:51:36 +01001313 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001314 "Reference Lstm: input and InputToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001315 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001316 "Reference Lstm: input and InputToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001317 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001318 "Reference Lstm: input and InputToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001319 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001320 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001321 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001322 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001323 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001324 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001325 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001326 "Reference Lstm: input and ForgetGateBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001327 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001328 "Reference Lstm: input and CellBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001329 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001330 "Reference Lstm: input and OutputGateBias types are mismatched");
1331 if (!descriptor.m_CifgEnabled)
1332 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001333 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001334 "Reference Lstm: input and InputToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001335 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001336 reasonIfUnsupported,
1337 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001338 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001339 "Reference Lstm: input and InputGateBias types are mismatched");
1340 if (descriptor.m_PeepholeEnabled)
1341 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001342 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001343 reasonIfUnsupported,
1344 "Reference Lstm: input and CellToInputWeights types are mismatched");
1345 }
1346 }
1347 if (descriptor.m_PeepholeEnabled)
1348 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001349 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001350 "Reference Lstm: input and CellToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001351 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001352 "Reference Lstm: input and CellToOutputWeights types are mismatched");
1353 }
1354 if (descriptor.m_ProjectionEnabled)
1355 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001356 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001357                                      "Reference Lstm: input and ProjectionWeights types are mismatched");
1358 if (paramsInfo.m_ProjectionBias != nullptr)
1359 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001360 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001361 "Reference Lstm: input and ProjectionBias types are mismatched");
1362 }
1363 }
1364 if (descriptor.m_LayerNormEnabled)
1365 {
1366 if (!descriptor.m_CifgEnabled)
1367 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001368 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001369 reasonIfUnsupported,
1370 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1371 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001372 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001373 reasonIfUnsupported,
1374 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001375 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001376 reasonIfUnsupported,
1377 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001378 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001379 reasonIfUnsupported,
1380 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1381 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001382
1383 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001384}
1385
saoste012df12b32018-11-28 16:57:20 +00001386bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1387 const TensorInfo& input1,
1388 const TensorInfo& output,
1389 Optional<std::string&> reasonIfUnsupported) const
1390{
Sadik Armagan2999a022019-04-09 14:20:12 +01001391 bool supported = true;
1392
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001393 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001394 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001395 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001396 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001397 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001398 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001399 DataType::QSymmS16,
1400 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001401 };
1402
1403 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1404 "Reference maximum: input 0 is not a supported type.");
1405
1406 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1407 "Reference maximum: input 1 is not a supported type.");
1408
1409 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1410 "Reference maximum: output is not a supported type.");
1411
1412 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1413                                  "Reference maximum: input 0 and input 1 types are mismatched");
1414
1415 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1416 "Reference maximum: input and output types are mismatched");
1417
1418 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1419 "Reference maximum: shapes are not suitable for implicit broadcast.");
1420
1421 return supported;
saoste012df12b32018-11-28 16:57:20 +00001422}
1423
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001424bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1425 const TensorInfo& output,
1426 const MeanDescriptor& descriptor,
1427 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001428{
James Conroy4d1ff582019-06-10 17:06:39 +01001429 bool supported = true;
1430 std::string meanLayerStr = "Mean";
1431 std::string outputTensorStr = "output";
1432
Sadik Armagan303980c2020-04-17 12:45:14 +01001433 std::array<DataType,6> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001434 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001435 DataType::BFloat16,
James Conroy4d1ff582019-06-10 17:06:39 +01001436 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001437 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001438 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001439 DataType::QAsymmU8,
1440 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001441 };
1442
1443 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1444 "Reference Mean: input type not supported.");
1445
1446 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1447 "Reference Mean: input and output types are mismatched");
1448
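    // Expected output rank: unchanged when KeepDims is set, 1 when reducing over every axis,
    // otherwise the input rank minus the number of reduced axes (falling back to 1 if that reaches zero).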
1449 if (descriptor.m_KeepDims)
1450 {
1451 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1452 reasonIfUnsupported,
1453 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1454 output.GetNumDimensions(),
1455 meanLayerStr, outputTensorStr).data());
1456 }
1457 else if (descriptor.m_Axis.empty())
1458 {
1459 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1460 reasonIfUnsupported,
1461 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1462 meanLayerStr, outputTensorStr).data());
1463 }
1464 else
1465 {
Matthew Sloyan171214c2020-09-09 09:07:37 +01001466 auto outputDim = input.GetNumDimensions() - armnn::numeric_cast<unsigned int>(descriptor.m_Axis.size());
James Conroy4d1ff582019-06-10 17:06:39 +01001467
1468 if (outputDim > 0)
1469 {
1470 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1471 reasonIfUnsupported,
1472 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1473 meanLayerStr, outputTensorStr).data());
1474 }
1475 else
1476 {
1477 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1478 reasonIfUnsupported,
1479 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1480 meanLayerStr, outputTensorStr).data());
1481 }
1482 }
1483
1484 return supported;
narpra0132b90462018-09-13 11:07:48 +01001485}
1486
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001487bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1488 const TensorInfo &output,
1489 Optional<std::string &> reasonIfUnsupported) const
1490{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001491 bool supported = true;
1492
Sadik Armagan303980c2020-04-17 12:45:14 +01001493 std::array<DataType,7> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001494 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001495 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001496 DataType::Float32,
1497 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001498 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001499 DataType::QAsymmU8,
1500 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001501 DataType::Boolean
1502 };
1503
1504 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1505 "Reference MemCopy: input type not supported");
1506
1507 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1508 "Reference MemCopy: output type not supported");
1509
1510 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1511 "Reference MemCopy: input and output types are mismatched");
1512
1513 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001514}
1515
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001516bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1517 const TensorInfo& input1,
1518 const TensorInfo& output,
1519 Optional<std::string&> reasonIfUnsupported) const
1520{
Sadik Armagan2999a022019-04-09 14:20:12 +01001521 bool supported = true;
1522
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001523 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001524 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001525 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001526 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001527 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001528 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001529 DataType::QSymmS16,
1530 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001531 };
1532
1533 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1534 "Reference minimum: input 0 is not a supported type.");
1535
1536 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1537 "Reference minimum: input 1 is not a supported type.");
1538
1539 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1540 "Reference minimum: output is not a supported type.");
1541
1542 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1543                                  "Reference minimum: input 0 and input 1 types are mismatched");
1544
1545 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1546 "Reference minimum: input and output types are mismatched");
1547
1548 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1549 "Reference minimum: shapes are not suitable for implicit broadcast.");
1550
1551 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001552}
1553
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001554bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1555 const TensorInfo& input1,
1556 const TensorInfo& output,
1557 Optional<std::string&> reasonIfUnsupported) const
1558{
Sadik Armagan2999a022019-04-09 14:20:12 +01001559 bool supported = true;
1560
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001561 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001562 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001563 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001564 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001565 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001566 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001567 DataType::QSymmS16,
1568 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001569 };
1570
1571 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1572 "Reference multiplication: input 0 is not a supported type.");
1573
1574 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1575 "Reference multiplication: input 1 is not a supported type.");
1576
1577 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1578 "Reference multiplication: output is not a supported type.");
1579
1580 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1581                                  "Reference multiplication: input 0 and input 1 types are mismatched");
1582
1583 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1584 "Reference multiplication: input and output types are mismatched");
1585
1586 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1587 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1588
1589 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001590}
1591
1592bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1593 const TensorInfo& output,
1594 const NormalizationDescriptor& descriptor,
1595 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001596{
Jan Eilers8eb25602020-03-09 12:13:48 +00001597 IgnoreUnused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001598
1599 // Define supported types
Sadik Armagan303980c2020-04-17 12:45:14 +01001600 std::array<DataType, 6> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001601 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001602 DataType::BFloat16,
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001603 DataType::Float16,
1604 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001605 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001606 DataType::QAsymmU8,
1607 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001608 };
1609
1610 bool supported = true;
1611
1612 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1613 "Reference normalization: input type not supported.");
1614
1615 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1616 "Reference normalization: output type not supported.");
1617
1618 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1619 "Reference normalization: input and output shapes have different "
1620 "num total elements.");
1621
1622 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001623}
1624
Derek Lamberti901ea112019-12-10 22:07:09 +00001625bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1626 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001627{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001628 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001629}
1630
1631bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1632 const TensorInfo& output,
1633 const PadDescriptor& descriptor,
1634 Optional<std::string&> reasonIfUnsupported) const
1635{
Jan Eilers8eb25602020-03-09 12:13:48 +00001636 IgnoreUnused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001637 bool supported = true;
1638
1639 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001640 std::array<DataType,6> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001641 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001642 DataType::BFloat16,
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001643 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001644 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001645 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001646 DataType::QAsymmU8,
1647 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001648 };
1649
1650 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1651 "Reference pad: input is not a supported type.");
1652
1653 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1654 "Reference pad: output is not a supported type.");
1655
1656 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1657 "Reference pad: input and output types are mismatched.");
1658
1659 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001660}
1661
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001662bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1663 const TensorInfo& output,
1664 const PermuteDescriptor& descriptor,
1665 Optional<std::string&> reasonIfUnsupported) const
1666{
Jan Eilers8eb25602020-03-09 12:13:48 +00001667 IgnoreUnused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001668 bool supported = true;
1669
1670 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001671 std::array<DataType, 6> supportedTypes =
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001672 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001673 DataType::BFloat16,
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001674 DataType::Float32,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001675 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001676 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001677 DataType::QAsymmU8,
1678 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001679 };
1680
1681 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1682 "Reference permute: input is not a supported type.");
1683
1684 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1685 "Reference permute: output is not a supported type.");
1686
1687 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1688 "Reference permute: input and output types are mismatched.");
1689
1690 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001691}
1692
1693bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1694 const TensorInfo& output,
1695 const Pooling2dDescriptor& descriptor,
1696 Optional<std::string&> reasonIfUnsupported) const
1697{
Jan Eilers8eb25602020-03-09 12:13:48 +00001698 IgnoreUnused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001699 bool supported = true;
1700
1701 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001702 std::array<DataType,6> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001703 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001704 DataType::BFloat16,
Teresa Charlina3b20472019-06-06 11:12:32 +01001705 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001706 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001707 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001708 DataType::QAsymmU8,
1709 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001710 };
1711
1712 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1713                                  "Reference pooling2d: input is not a supported type.");
1714
1715 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1716                                  "Reference pooling2d: output is not a supported type.");
1717
1718 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1719                                  "Reference pooling2d: input and output types are mismatched.");
1720
1721 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001722}
1723
James Conroy4f1f8992020-04-29 20:01:10 +01001724bool RefLayerSupport::IsQLstmSupported(const TensorInfo& input,
1725 const TensorInfo& previousOutputIn,
1726 const TensorInfo& previousCellStateIn,
1727 const TensorInfo& outputStateOut,
1728 const TensorInfo& cellStateOut,
1729 const TensorInfo& output,
1730 const QLstmDescriptor& descriptor,
1731 const LstmInputParamsInfo& paramsInfo,
1732 Optional<std::string&> reasonIfUnsupported) const
1733{
1734 IgnoreUnused(input);
1735 IgnoreUnused(previousOutputIn);
1736 IgnoreUnused(previousCellStateIn);
1737 IgnoreUnused(outputStateOut);
1738 IgnoreUnused(cellStateOut);
1739 IgnoreUnused(output);
1740 IgnoreUnused(descriptor);
1741 IgnoreUnused(paramsInfo);
1742
1743 IgnoreUnused(reasonIfUnsupported);
1744
1745 return true;
1746}
1747
Derek Lamberti5f400d62019-03-25 15:41:58 +00001748bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1749 const TensorInfo& output,
1750 Optional<std::string&> reasonIfUnsupported) const
1751{
1752 bool supported = true;
1753
Finn Williamsfd271062019-12-04 14:27:27 +00001754 // Define supported input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001755 std::array<DataType,7> supportedInputTypes = {
1756 DataType::BFloat16,
Keith Davis5e51cd82020-01-29 16:52:59 +00001757 DataType::Float32,
Keith Davis3d8bc972020-02-04 09:31:47 +00001758 DataType::Float16,
Ryan OShea9add1202020-02-07 10:06:33 +00001759 DataType::QAsymmS8,
Keith Davis5e51cd82020-01-29 16:52:59 +00001760 DataType::QAsymmU8,
1761 DataType::QSymmS8,
1762 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001763 };
1764
1765 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1766 "Reference quantize: input type not supported.");
1767
1768 // Define supported output types.
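    // Only quantized types are valid outputs; quantized inputs are accepted too, so the layer can also re-quantize.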
Ryan OShea9add1202020-02-07 10:06:33 +00001769 std::array<DataType,4> supportedOutputTypes = {
Ryan OShea9add1202020-02-07 10:06:33 +00001770 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001771 DataType::QAsymmU8,
Finn Williamsfd271062019-12-04 14:27:27 +00001772 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001773 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001774 };
1775 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1776 "Reference quantize: output type not supported.");
1777
1778 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1779 "Reference quantize: input and output shapes have different num total elements.");
1780
1781 return supported;
1782}
1783
Finn Williams2605b232020-06-10 15:53:46 +01001784bool RefLayerSupport::IsRankSupported(const TensorInfo& input,
1785 const TensorInfo& output,
1786 Optional<std::string&> reasonIfUnsupported) const
1787{
1788 IgnoreUnused(input);
1789 // Define supported output types.
1790 std::array<DataType,1> supportedOutputTypes =
1791 {
1792 DataType::Signed32,
1793 };
1794
1795 return CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1796                          "Reference rank: output type not supported.");
1797}
1798
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001799bool RefLayerSupport::IsReduceSupported(const TensorInfo& input,
1800 const TensorInfo& output,
1801 const ReduceDescriptor& descriptor,
1802 Optional<std::string&> reasonIfUnsupported) const
1803{
1804 IgnoreUnused(descriptor);
1805 bool supported = true;
1806 std::array<DataType,7> supportedTypes =
1807 {
1808 DataType::BFloat16,
1809 DataType::Float32,
1810 DataType::Float16,
1811 DataType::QAsymmS8,
1812 DataType::QAsymmU8,
1813 DataType::QSymmS16,
1814 DataType::Signed32
1815 };
1816
1817 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1818 "Reference Reduce: input type not supported");
1819
1820 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1821 "Reference Reduce: output type not supported");
1822
1823 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1824 "Reference Reduce: input and output types not matching");
1825
1826 return supported;
1827}
1828
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001829bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001830 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001831 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001832 Optional<std::string&> reasonIfUnsupported) const
1833{
Jan Eilers8eb25602020-03-09 12:13:48 +00001834 IgnoreUnused(output);
1835 IgnoreUnused(descriptor);
Nina Drozd2f2778f2019-05-27 10:37:05 +01001836 // Define supported output types.
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001837 std::array<DataType,8> supportedOutputTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001838 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001839 DataType::BFloat16,
Nina Drozd2f2778f2019-05-27 10:37:05 +01001840 DataType::Float32,
1841 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001842 DataType::Signed32,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001843 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001844 DataType::QAsymmU8,
Narumol Prangnawarat0c95f4c2020-11-18 16:52:07 +00001845 DataType::QSymmS16,
1846 DataType::Boolean
Nina Drozd2f2778f2019-05-27 10:37:05 +01001847 };
Keith Davis0c2eeac2020-02-11 16:51:50 +00001848
Nina Drozd2f2778f2019-05-27 10:37:05 +01001849 return CheckSupportRule(TypeAnyOf(input, supportedOutputTypes), reasonIfUnsupported,
1850 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001851}
1852
Teresa Charlin970f43b2019-07-01 13:51:07 +01001853bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1854 const TensorInfo& output,
1855 const ResizeDescriptor& descriptor,
1856 Optional<std::string&> reasonIfUnsupported) const
1857{
Jan Eilers8eb25602020-03-09 12:13:48 +00001858 IgnoreUnused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001859 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001860 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001861 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001862 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001863 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001864 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001865 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001866 DataType::QAsymmU8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001867 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001868 };
1869
1870 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1871 "Reference Resize: input type not supported");
1872
1873 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1874 "Reference Resize: output type not supported");
1875
1876 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1877 "Reference Resize: input and output types not matching");
1878
1879 return supported;
1880}
1881
Keith Davis3ae3f972021-05-21 16:33:48 +01001882bool RefLayerSupport::IsShapeSupported(const TensorInfo& input,
1883 const TensorInfo& output,
1884 Optional<std::string&> reasonIfUnsupported) const
1885{
1886 IgnoreUnused(input);
1887 bool supported = true;
1888
1889 std::array<DataType, 1> supportedTypes =
1890 {
1891 DataType::Signed32
1892 };
1893
1894 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1895 "Reference Shape: output type not supported");
1896
1897 return supported;
1898}
1899
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001900bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1901 const TensorInfo& output,
1902 const SliceDescriptor& descriptor,
1903 Optional<std::string&> reasonIfUnsupported) const
1904{
Jan Eilers8eb25602020-03-09 12:13:48 +00001905 IgnoreUnused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001906 bool supported = true;
1907
Sadik Armagan303980c2020-04-17 12:45:14 +01001908 std::array<DataType, 5> supportedTypes =
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001909 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001910 DataType::BFloat16,
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001911 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001912 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001913 DataType::QAsymmU8,
1914 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001915 };
1916
1917 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1918 "Reference Slice: input type not supported");
1919
1920 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1921 "Reference Slice: output type not supported");
1922
1923 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1924 "Reference Slice: input and output types are mismatched");
1925
1926 return supported;
1927}
1928
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001929bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1930 const TensorInfo& output,
1931 const SoftmaxDescriptor& descriptor,
1932 Optional<std::string&> reasonIfUnsupported) const
1933{
Jan Eilers8eb25602020-03-09 12:13:48 +00001934 IgnoreUnused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01001935 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001936 std::array<DataType,7> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01001937 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001938 DataType::BFloat16,
1939 DataType::Float32,
1940 DataType::Float16,
1941 DataType::QSymmS8,
1942 DataType::QAsymmS8,
1943 DataType::QAsymmU8,
1944 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01001945 };
1946
1947 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001948                                  "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001949
1950 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001951                                  "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001952
1953 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001954                                  "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01001955
1956 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001957}
1958
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001959bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1960 const TensorInfo& output,
1961 const SpaceToBatchNdDescriptor& descriptor,
1962 Optional<std::string&> reasonIfUnsupported) const
1963{
Jan Eilers8eb25602020-03-09 12:13:48 +00001964 IgnoreUnused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01001965 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001966 std::array<DataType,6> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01001967 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001968 DataType::BFloat16,
1969 DataType::Float32,
1970 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001971 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001972 DataType::QAsymmU8,
1973 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01001974 };
1975
1976 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1977 "Reference SpaceToBatchNd: input type not supported");
1978
1979 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1980 "Reference SpaceToBatchNd: output type not supported");
1981
1982 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1983 "Reference SpaceToBatchNd: input and output types are mismatched");
1984
1985 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001986}
1987
Keith Davisa57eccb2019-06-14 17:33:22 +01001988bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01001989 const TensorInfo& output,
1990 const SpaceToDepthDescriptor& descriptor,
1991 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01001992{
1993
Jan Eilers8eb25602020-03-09 12:13:48 +00001994 IgnoreUnused(descriptor);
Keith Davisa57eccb2019-06-14 17:33:22 +01001995 bool supported = true;
1996
Sadik Armagan303980c2020-04-17 12:45:14 +01001997 std::array<DataType,6> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01001998 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001999 DataType::BFloat16,
Keith Davisa57eccb2019-06-14 17:33:22 +01002000 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002001 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002002 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002003 DataType::QAsymmU8,
2004 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01002005 };
2006
2007 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2008 "Reference SpaceToDepth: input type not supported");
2009
2010 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2011 "Reference SpaceToDepth: output type not supported");
2012
2013 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2014 "Reference SpaceToDepth: input and output types are mismatched");
2015
2016 return supported;
2017}
2018
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002019bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002020 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
2021 const ViewsDescriptor& descriptor,
2022 Optional<std::string&> reasonIfUnsupported) const
2023{
Jan Eilers8eb25602020-03-09 12:13:48 +00002024 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002025 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002026 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002027 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002028 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002029 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002030 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002031 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002032 DataType::QAsymmU8,
2033 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002034 };
2035
2036 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2037                                  "Reference splitter: input type not supported");
Derek Lambertieac4adb2020-08-25 13:05:59 +01002038 for (const TensorInfo& output : outputs)
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002039 {
2040        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2041                                      "Reference splitter: output type not supported");
2042
2043 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2044 "Reference splitter: input and output types mismatched.");
2045 }
2046
2047 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01002048}
2049
Matthew Jackson81e601c2019-07-11 12:07:09 +01002050bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
2051 const TensorInfo& output,
2052 const StackDescriptor& descriptor,
2053 Optional<std::string&> reasonIfUnsupported) const
2054{
Jan Eilers8eb25602020-03-09 12:13:48 +00002055 IgnoreUnused(descriptor);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002056
2057 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01002058 std::array<DataType,6> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01002059 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002060 DataType::BFloat16,
Matthew Jackson81e601c2019-07-11 12:07:09 +01002061 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01002062 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002063 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002064 DataType::QAsymmU8,
2065 DataType::QSymmS16
Matthew Jackson81e601c2019-07-11 12:07:09 +01002066 };
2067
2068 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2069 "Reference stack: output type not supported");
2070 for (const TensorInfo* input : inputs)
2071 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002072 ARMNN_ASSERT(input != nullptr);
Matthew Jackson81e601c2019-07-11 12:07:09 +01002073 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
2074 "Reference stack: input type not supported");
2075
2076 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
2077 "Reference stack: input and output types mismatched.");
2078 }
2079
2080 return supported;
2081}
2082
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002083bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
2084 const TensorInfo& output,
2085 const StridedSliceDescriptor& descriptor,
2086 Optional<std::string&> reasonIfUnsupported) const
2087{
Jan Eilers8eb25602020-03-09 12:13:48 +00002088 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002089 bool supported = true;
2090
Sadik Armagan303980c2020-04-17 12:45:14 +01002091 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002092 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002093 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002094 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01002095 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002096 DataType::QAsymmU8,
2097 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002098 };
2099
2100 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2101 "Reference StridedSlice: input type not supported");
2102
2103 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2104 "Reference StridedSlice: output type not supported");
2105
2106 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2107 "Reference StridedSlice: input and output types are mismatched");
2108
2109 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002110}
2111
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002112bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
2113 const TensorInfo& input1,
2114 const TensorInfo& output,
2115 Optional<std::string&> reasonIfUnsupported) const
2116{
Sadik Armagan2999a022019-04-09 14:20:12 +01002117 bool supported = true;
2118
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002119 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002120 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01002121 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002122 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002123 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002124 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002125 DataType::QSymmS16,
2126 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01002127 };
2128
2129 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
2130 "Reference subtraction: input 0 is not a supported type.");
2131
2132 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
2133 "Reference subtraction: input 1 is not a supported type.");
2134
2135 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2136 "Reference subtraction: output is not a supported type.");
2137
2138 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
2139 "Reference subtraction: input 0 and Input 1 types are mismatched");
2140
2141 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
2142 "Reference subtraction: input and output types are mismatched");
2143
2144 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
2145 "Reference subtraction: shapes are not suitable for implicit broadcast.");
2146
2147 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002148}
2149
Matteo Martincighab9e5252019-06-13 17:27:46 +01002150bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
2151 const TensorInfo& alpha,
2152 const TensorInfo& output,
2153 Optional<std::string&> reasonIfUnsupported) const
2154{
2155 bool supported = true;
2156
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002157 std::array<DataType, 6> supportedTypes
Matteo Martincighab9e5252019-06-13 17:27:46 +01002158 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002159 DataType::BFloat16,
Matteo Martincighab9e5252019-06-13 17:27:46 +01002160 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002161 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002162 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002163 DataType::QAsymmU8,
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002164 DataType::QSymmS16
Matteo Martincighab9e5252019-06-13 17:27:46 +01002165 };
2166
2167 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2168 "PReLU: input is not a supported type.");
2169
2170 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
2171 "PReLU: alpha is not a supported type.");
2172
2173 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2174 "PReLU: output is not a supported type.");
2175
2176 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
2177 "PReLU: input, alpha and output types are mismatched");
2178
2179 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
2180 "PReLU: shapes are not suitable for implicit broadcast");
2181
2182 return supported;
2183}
2184
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002185bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
2186 const TensorInfo& output,
2187 const TransposeConvolution2dDescriptor& descriptor,
2188 const TensorInfo& weights,
2189 const Optional<TensorInfo>& biases,
2190 Optional<std::string&> reasonIfUnsupported) const
2191{
Jan Eilers8eb25602020-03-09 12:13:48 +00002192 IgnoreUnused(descriptor);
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002193 bool supported = true;
2194
Sadik Armagan303980c2020-04-17 12:45:14 +01002195 std::array<DataType,7> supportedTypes =
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002196 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002197 DataType::BFloat16,
2198 DataType::Float32,
2199 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002200 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002201 DataType::QAsymmU8,
Sadik Armagan303980c2020-04-17 12:45:14 +01002202 DataType::QSymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002203 DataType::QSymmS16
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002204 };
2205
2206 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2207 "Reference TransposeConvolution2d: input is not a supported type.");
2208
2209 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2210 "Reference TransposeConvolution2d: output is not a supported type.");
2211
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002212 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2213 "Reference TransposeConvolution2d: input and output types mismatched.");
2214
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002215
2216 const DataType inputType = input.GetDataType();
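    // For quantized 8-bit inputs the weights may use any 8-bit quantized type (including per-axis QSymmS8),
    // so the usual input/weights type-equality check is only applied to non-quantized inputs.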
Sadik Armagan303980c2020-04-17 12:45:14 +01002217 if (IsQuantized8BitType(inputType))
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002218 {
Jan Eilers1b2654f2021-09-24 15:45:46 +01002219 std::array<DataType, 3> supportedWeightTypes =
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002220 {
Sadik Armagan303980c2020-04-17 12:45:14 +01002221 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002222 DataType::QAsymmU8,
Jan Eilers1b2654f2021-09-24 15:45:46 +01002223 DataType::QSymmS8
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002224 };
2225
2226 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
2227 "Reference TransposeConvolution2d: weights type not supported for "
2228 "quantized input.");
2229 }
2230 else
2231 {
2232 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
2233 "Reference TransposeConvolution2d: weights is not a supported type.");
2234
2235 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
2236 "Reference TransposeConvolution2d: input and weights types mismatched.");
2237 }
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002238
2239 if (biases.has_value())
2240 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002241 std::array<DataType,4> biasesSupportedTypes =
Aron Virginas-Tar651aafe2019-08-05 11:52:05 +01002242 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002243 DataType::BFloat16,
2244 DataType::Float32,
2245 DataType::Float16,
2246 DataType::Signed32
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002247 };
2248 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
2249 "Reference TransposeConvolution2d: biases is not a supported type.");
2250 }
2251
2252 return supported;
2253}
2254
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002255bool RefLayerSupport::IsTransposeSupported(const TensorInfo& input,
2256 const TensorInfo& output,
2257 const TransposeDescriptor& descriptor,
2258 Optional<std::string&> reasonIfUnsupported) const
2259{
Jan Eilers8eb25602020-03-09 12:13:48 +00002260 IgnoreUnused(descriptor);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002261 bool supported = true;
2262
2263 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01002264 std::array<DataType, 6> supportedTypes =
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002265 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002266 DataType::BFloat16,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002267 DataType::Float32,
2268 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002269 DataType::QAsymmS8,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002270 DataType::QAsymmU8,
2271 DataType::QSymmS16
2272 };
2273
2274 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2275 "Reference transpose: input is not a supported type.");
2276
2277 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2278 "Reference transpose: output is not a supported type.");
2279
2280 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2281 "Reference transpose: input and output types are mismatched.");
2282
2283 return supported;
2284}
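// Illustrative usage sketch (not part of this file): querying transpose support for a
// Float32 tensor. The shapes and the NCHW -> NHWC dimension mapping are assumptions chosen
// purely for illustration.
//
//     armnn::TensorInfo input ({1, 3, 8, 8}, armnn::DataType::Float32);
//     armnn::TensorInfo output({1, 8, 8, 3}, armnn::DataType::Float32);
//     armnn::TransposeDescriptor descriptor({0, 2, 3, 1});
//     std::string reason;
//     bool supported = armnn::RefLayerSupport().IsTransposeSupported(
//         input, output, descriptor, armnn::Optional<std::string&>(reason));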
2285
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01002286bool RefLayerSupport::IsUnidirectionalSequenceLstmSupported(
2287 const TensorInfo& input,
2288 const TensorInfo& outputStateIn,
2289 const TensorInfo& cellStateIn,
2290 const TensorInfo& output,
2291 const Optional<TensorInfo>& hiddenStateOutput,
2292 const Optional<TensorInfo>& cellStateOutput,
2293 const UnidirectionalSequenceLstmDescriptor& descriptor,
2294 const LstmInputParamsInfo& paramsInfo,
2295 Optional<std::string&> reasonIfUnsupported) const
2296{
2301 bool supported = true;
2302
2303 if (hiddenStateOutput.has_value() || cellStateOutput.has_value())
2304 {
     if (reasonIfUnsupported.has_value())
     {
2305 reasonIfUnsupported.value() += "Reference UnidirectionalSequenceLstm: hidden state output "
2306 "and cell state output are not supported at the moment.";
     }
     supported = false;
2307 }
2308
2309 std::array<DataType, 1> supportedTypes =
2310 {
2311 DataType::Float32
2312 };
2313
Narumol Prangnawaratbd575b22021-08-31 16:53:54 +01002314 std::array<DataType, 2> supportedWeightTypes =
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01002315 {
Narumol Prangnawaratbd575b22021-08-31 16:53:54 +01002316 DataType::Float32,
2317 DataType::QAsymmS8
Narumol Prangnawarate5339e72021-07-28 17:33:28 +01002318 };
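    // Activations (input, output and the state tensors) and gate biases must be Float32;
    // the weight tensors may be either Float32 or QAsymmS8.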
2319
2320 // Check inputs and outputs.
2321 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2322 "Reference UnidirectionalSequenceLstm: input is not a supported type.");
2323 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
2324 "Reference UnidirectionalSequenceLstm: input and outputStateIn types are mismatched");
2325 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
2326 "Reference UnidirectionalSequenceLstm: input and cellStateIn types are mismatched");
2327
2328 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2329 "Reference UnidirectionalSequenceLstm: input and output types are mismatched");
2330 // Check layer parameters.
2331 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToForgetWeights(), supportedWeightTypes),
2332 reasonIfUnsupported,
2333 "Reference UnidirectionalSequenceLstm: InputToForgetWeights "
2334 "is not a supported type.");
2335 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToCellWeights(), supportedWeightTypes),
2336 reasonIfUnsupported,
2337 "Reference UnidirectionalSequenceLstm: InputToCellWeights is not a supported type.");
2338 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToOutputWeights(), supportedWeightTypes),
2339 reasonIfUnsupported,
2340 "Reference UnidirectionalSequenceLstm: InputToOutputWeights "
2341 "is not a supported type.");
2342 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToForgetWeights(), supportedWeightTypes),
2343 reasonIfUnsupported,
2344 "Reference UnidirectionalSequenceLstm: RecurrentToForgetWeights "
2345 "is not a supported type.");
2346 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToCellWeights(), supportedWeightTypes),
2347 reasonIfUnsupported,
2348 "Reference UnidirectionalSequenceLstm: RecurrentToCellWeights "
2349 "is not a supported type.");
2350 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToOutputWeights(), supportedWeightTypes),
2351 reasonIfUnsupported,
2352 "Reference UnidirectionalSequenceLstm: RecurrentToOutputWeights "
2353 "is not a supported type.");
2354 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
2355 "Reference UnidirectionalSequenceLstm: input and ForgetGateBias types "
2356 "are mismatched");
2357 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
2358 "Reference UnidirectionalSequenceLstm: input and CellBias types are mismatched");
2359 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
2360 "Reference UnidirectionalSequenceLstm: input and OutputGateBias types "
2361 "are mismatched");
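    // Optional parameters (CIFG, peephole, projection and layer normalisation weights)
    // are only validated when the corresponding feature is enabled in the descriptor.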
2362 if (!descriptor.m_CifgEnabled)
2363 {
2364 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputToInputWeights(), supportedWeightTypes),
2365 reasonIfUnsupported,
2366 "Reference UnidirectionalSequenceLstm: InputToInputWeights "
2367 "is not a supported type.");
2368 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetRecurrentToInputWeights(), supportedWeightTypes),
2369 reasonIfUnsupported,
2370 "Reference UnidirectionalSequenceLstm: RecurrentToInputWeights "
2371 "is not a supported type.");
2372 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
2373 "Reference UnidirectionalSequenceLstm: input and InputGateBias types "
2374 "are mismatched");
2375 if (descriptor.m_PeepholeEnabled)
2376 {
2377 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToInputWeights(), supportedWeightTypes),
2378 reasonIfUnsupported,
2379 "Reference UnidirectionalSequenceLstm: CellToInputWeights "
2380 "is not a supported type.");
2381 }
2382 }
2383 if (descriptor.m_PeepholeEnabled)
2384 {
2385 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToForgetWeights(), supportedWeightTypes),
2386 reasonIfUnsupported,
2387 "Reference UnidirectionalSequenceLstm: CellToForgetWeights "
2388 "is not a supported type.");
2389 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellToOutputWeights(), supportedWeightTypes),
2390 reasonIfUnsupported,
2391 "Reference UnidirectionalSequenceLstm: CellToOutputWeights "
2392 "is not a supported type.");
2393 }
2394 if (descriptor.m_ProjectionEnabled)
2395 {
2396 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetProjectionWeights(), supportedWeightTypes),
2397 reasonIfUnsupported,
2398 "Reference UnidirectionalSequenceLstm: ProjectionWeights "
2399 "is not a supported type.");
2400 if (paramsInfo.m_ProjectionBias != nullptr)
2401 {
2402 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
2403 "Reference UnidirectionalSequenceLstm: input and ProjectionBias types "
2404 "are mismatched");
2405 }
2406 }
2407 if (descriptor.m_LayerNormEnabled)
2408 {
2409 if (!descriptor.m_CifgEnabled)
2410 {
2411 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetInputLayerNormWeights(), supportedWeightTypes),
2412 reasonIfUnsupported,
2413 "Reference UnidirectionalSequenceLstm: InputLayerNormWeights "
2414 "is not a supported type.");
2415 }
2416 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetForgetLayerNormWeights(), supportedWeightTypes),
2417 reasonIfUnsupported,
2418 "Reference UnidirectionalSequenceLstm: ForgetLayerNormWeights "
2419 "is not a supported type.");
2420 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetCellLayerNormWeights(), supportedWeightTypes),
2421 reasonIfUnsupported,
2422 "Reference UnidirectionalSequenceLstm: CellLayerNormWeights "
2423 "is not a supported type.");
2424 supported &= CheckSupportRule(TypeAnyOf(paramsInfo.GetOutputLayerNormWeights(), supportedWeightTypes),
2425 reasonIfUnsupported,
2426 "Reference UnidirectionalSequenceLstm: OutputLayerNormWeights "
2427 "is not a supported type.");
2428 }
2429
2430 return supported;
2431}
2432
arovir011c7c81b2018-10-08 11:34:28 +01002433} // namespace armnn