//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/utility/IgnoreUnused.hpp>

#include <LayerSupportCommon.hpp>
#include <backendsCommon/LayerSupportRules.hpp>

#include <boost/cast.hpp>

#include <vector>
#include <array>

using namespace boost;

namespace armnn
{

namespace
{

template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace
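
// For example, given expected = 4, actual = 3, layerStr = "batchToSpaceNd" and
// tensorName = "output", CreateIncorrectDimensionsErrorMsg above produces:
// "Reference batchToSpaceNd: Expected 4 dimensions but got 3 dimensions instead,
//  for the 'output' tensor."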

bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    return IsElementwiseUnarySupported(input,
                                       output,
                                       ElementwiseUnaryDescriptor(UnaryOperation::Abs),
                                       reasonIfUnsupported);
}

bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::Elu:
                case ActivationFunction::HardSwish:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}
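
// A minimal usage sketch (illustrative only; assumes the usual armnn public headers
// and a hypothetical 4D float tensor). Callers normally reach these predicates
// through the backend's ILayerSupport interface, for example:
//
//     armnn::RefLayerSupport layerSupport;
//     std::string reason;
//     armnn::TensorInfo info(armnn::TensorShape({1, 2, 2, 2}), armnn::DataType::Float32);
//     bool ok = layerSupport.IsActivationSupported(info, info,
//                                                  armnn::ActivationDescriptor(),
//                                                  armnn::Optional<std::string&>(reason));
//
// If ok is false, 'reason' accumulates the messages of the failing rules.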

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}
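
// Note: ShapesAreBroadcastCompatible follows the usual broadcasting rule: each input
// dimension must either match the corresponding output dimension or be 1 (missing
// leading dimensions are treated as 1). For example, input shapes [1,2,2,2] and
// [1,1,1,2] are broadcast-compatible with an output shape of [1,2,2,2].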

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo& input, const armnn::TensorInfo& output,
                                           const armnn::ArgMinMaxDescriptor& descriptor,
                                           armnn::Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeIs(output, DataType::Signed32), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    std::array<DataType, 7> supportedInputTypes =
    {
        DataType::Boolean,
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QAsymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        BOOST_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Signed32,
        DataType::QAsymmU8,
        DataType::QAsymmS8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertBf16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}
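
// Taken together, the two IsSupportedForDataTypeGeneric calls above require a Float16
// input and a Float32 output: the first call only returns true when the input type
// lands on the Float16 slot (&TrueFunc), and the second only when the output type
// lands on the Float32 slot.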

bool RefLayerSupport::IsConvertFp32ToBf16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &FalseInputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseOutputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution2dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QAsymmS8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: output is not a supported type.");

    // For Convolution2d, BFloat16 input with Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Convolution2d: input and output types mismatched.");
    }

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QAsymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}
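
// Note on the quantized path above: for 8-bit quantized inputs the weights are
// validated against a separate list that includes QSymmS8 and the deprecated
// QuantizedSymm8PerAxis encoding, so per-axis (per-channel) quantized weights can
// pass this check alongside per-tensor ones.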

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QAsymmS8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    std::array<DataType, 3> supportedWeightTypes =
    {
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QuantizedSymm8PerAxis // deprecated
    };
    ARMNN_NO_DEPRECATE_WARN_END

    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedInputTypes = {
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,3> supportedOutputTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,4> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,5> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
                                       const TensorInfo& input1,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Equal),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(output);
    bool supported = true;

    std::array<DataType,4> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: output type not supported.");

    return supported;
}

bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const TensorInfo& weights,
                                                const TensorInfo& biases,
                                                const FullyConnectedDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QAsymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: output type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights type not supported.");

    // For FullyConnected, BFloat16 input with Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Fully Connected: input and output types mismatched.");
    }

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    std::array<DataType, 3> supportedWeightTypes =
    {
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QuantizedSymm8PerAxis // deprecated
    };
    ARMNN_NO_DEPRECATE_WARN_END

    if (IsQuantized8BitType(input.GetDataType()))
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: input and weights types mismatched.");
    }

    if (descriptor.m_BiasEnabled)
    {
        // Define supported types for bias.
        std::array<DataType, 4> supportedBiasTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type not supported.");

        supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: bias and weight types mismatch.");

        supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type inferred from weights is incompatible.");
    }

    return supported;
}
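
// Note on the bias rules above: BiasAndWeightsTypesMatch and BiasAndWeightsTypesCompatible
// tie the bias type to the weight type, so float weights are typically expected to come
// with a float bias and 8-bit quantized weights with a Signed32 bias; the exact pairing
// is defined by the shared backend support rules in LayerSupportRules.hpp.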

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Greater),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 const L2NormalizationDescriptor& descriptor,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 5> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const LogSoftmaxDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference LogSoftmax: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference LogSoftmax: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference LogSoftmax: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
                                      const TensorInfo& outputStateIn,
                                      const TensorInfo& cellStateIn,
                                      const TensorInfo& scratchBuffer,
                                      const TensorInfo& outputStateOut,
                                      const TensorInfo& cellStateOut,
                                      const TensorInfo& output,
                                      const LstmDescriptor& descriptor,
                                      const LstmInputParamsInfo& paramsInfo,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(paramsInfo);

    bool supported = true;

    std::array<DataType,3> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::QSymmS16
    };

    // check inputs and outputs
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Lstm: input is not a supported type.");
    supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
                                  "Reference Lstm: input and outputStateIn types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
                                  "Reference Lstm: input and cellStateIn types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
                                  "Reference Lstm: input and scratchBuffer types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
                                  "Reference Lstm: input and outputStateOut types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
                                  "Reference Lstm: input and cellStateOut types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Lstm: input and output types are mismatched");
    // check layer parameters
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and InputToForgetWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and InputToCellWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and InputToOutputWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
                                  "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
                                  "Reference Lstm: input and ForgetGateBias types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
                                  "Reference Lstm: input and CellBias types are mismatched");
    supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
                                  "Reference Lstm: input and OutputGateBias types are mismatched");
    if (!descriptor.m_CifgEnabled)
    {
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
                                      "Reference Lstm: input and InputToInputWeights types are mismatched");
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
                                      reasonIfUnsupported,
                                      "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
                                      "Reference Lstm: input and InputGateBias types are mismatched");
        if (descriptor.m_PeepholeEnabled)
        {
            supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
                                          reasonIfUnsupported,
                                          "Reference Lstm: input and CellToInputWeights types are mismatched");
        }
    }
    if (descriptor.m_PeepholeEnabled)
    {
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
                                      "Reference Lstm: input and CellToForgetWeights types are mismatched");
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
                                      "Reference Lstm: input and CellToOutputWeights types are mismatched");
    }
    if (descriptor.m_ProjectionEnabled)
    {
        supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001188 "Reference Lstm: input and ProjectionWeights types are mismatched");
1189 if (paramsInfo.m_ProjectionBias != nullptr)
1190 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001191 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001192 "Reference Lstm: input and ProjectionBias types are mismatched");
1193 }
1194 }
1195 if (descriptor.m_LayerNormEnabled)
1196 {
1197 if (!descriptor.m_CifgEnabled)
1198 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001199 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001200 reasonIfUnsupported,
1201 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1202 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001203 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001204 reasonIfUnsupported,
1205 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001206 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001207 reasonIfUnsupported,
1208 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001209 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001210 reasonIfUnsupported,
1211 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1212 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001213
1214 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001215}
1216
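// The binary elementwise checks below (Maximum, Minimum, Multiplication, Subtraction) follow the
// same pattern: both inputs and the output must use one of the supported types, the types must
// all match, and the shapes must be broadcast compatible. As an illustrative (hypothetical)
// example, input shapes [1,2,3,4] and [1,1,1,4] broadcast to an output of [1,2,3,4], whereas
// [1,2,3,4] and [1,3,3,4] do not, so ShapesAreBroadcastCompatible would reject that pairing.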
saoste012df12b32018-11-28 16:57:20 +00001217bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1218 const TensorInfo& input1,
1219 const TensorInfo& output,
1220 Optional<std::string&> reasonIfUnsupported) const
1221{
Sadik Armagan2999a022019-04-09 14:20:12 +01001222 bool supported = true;
1223
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001224 std::array<DataType,6> supportedTypes = {
1225 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001226 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001227 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001228 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001229 DataType::QAsymmU8,
1230 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001231 };
1232
1233 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1234 "Reference maximum: input 0 is not a supported type.");
1235
1236 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1237 "Reference maximum: input 1 is not a supported type.");
1238
1239 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1240 "Reference maximum: output is not a supported type.");
1241
1242 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1243 "Reference maximum: input 0 and Input 1 types are mismatched");
1244
1245 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1246 "Reference maximum: input and output types are mismatched");
1247
1248 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1249 "Reference maximum: shapes are not suitable for implicit broadcast.");
1250
1251 return supported;
saoste012df12b32018-11-28 16:57:20 +00001252}
1253
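// For Mean, the expected output rank depends on the descriptor: with m_KeepDims the output keeps
// the input rank, with an empty m_Axis the reduction collapses to a 1-D tensor, and otherwise it
// is inputRank minus the number of reduced axes (falling back to 1 when every axis is reduced).
// As a hypothetical example, reducing a 4-D input over two axes without KeepDims is expected to
// yield a 2-D output; any mismatch is reported via CreateIncorrectDimensionsErrorMsg.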
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001254bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1255 const TensorInfo& output,
1256 const MeanDescriptor& descriptor,
1257 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001258{
James Conroy4d1ff582019-06-10 17:06:39 +01001259 bool supported = true;
1260 std::string meanLayerStr = "Mean";
1261 std::string outputTensorStr = "output";
1262
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001263 std::array<DataType,5> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001264 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001265 DataType::BFloat16,
James Conroy4d1ff582019-06-10 17:06:39 +01001266 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001267 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001268 DataType::QAsymmU8,
1269 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001270 };
1271
1272 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1273 "Reference Mean: input type not supported.");
1274
1275 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1276 "Reference Mean: input and output types are mismatched");
1277
1278 if (descriptor.m_KeepDims)
1279 {
1280 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1281 reasonIfUnsupported,
1282 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1283 output.GetNumDimensions(),
1284 meanLayerStr, outputTensorStr).data());
1285 }
1286 else if (descriptor.m_Axis.empty())
1287 {
1288 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1289 reasonIfUnsupported,
1290 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1291 meanLayerStr, outputTensorStr).data());
1292 }
1293 else
1294 {
1295 auto outputDim = input.GetNumDimensions() - boost::numeric_cast<unsigned int>(descriptor.m_Axis.size());
1296
1297 if (outputDim > 0)
1298 {
1299 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1300 reasonIfUnsupported,
1301 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1302 meanLayerStr, outputTensorStr).data());
1303 }
1304 else
1305 {
1306 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1307 reasonIfUnsupported,
1308 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1309 meanLayerStr, outputTensorStr).data());
1310 }
1311 }
1312
1313 return supported;
narpra0132b90462018-09-13 11:07:48 +01001314}
1315
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001316bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
Nikhil Raj8599a412018-11-19 14:51:07 +00001317 const TensorInfo& output,
Jim Flynne242f2d2019-05-22 14:24:13 +01001318 const MergerDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001319 Optional<std::string&> reasonIfUnsupported) const
1320{
Jim Flynne242f2d2019-05-22 14:24:13 +01001321 return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001322}
1323
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001324bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1325 const TensorInfo &output,
1326 Optional<std::string &> reasonIfUnsupported) const
1327{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001328 bool supported = true;
1329
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001330 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001331 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001332 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001333 DataType::Float32,
1334 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001335 DataType::QAsymmU8,
1336 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001337 DataType::Boolean
1338 };
1339
1340 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1341 "Reference MemCopy: input type not supported");
1342
1343 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1344 "Reference MemCopy: output type not supported");
1345
1346 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1347 "Reference MemCopy: input and output types are mismatched");
1348
1349 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001350}
1351
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001352bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1353 const TensorInfo& input1,
1354 const TensorInfo& output,
1355 Optional<std::string&> reasonIfUnsupported) const
1356{
Sadik Armagan2999a022019-04-09 14:20:12 +01001357 bool supported = true;
1358
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001359 std::array<DataType,5> supportedTypes = {
1360 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001361 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001362 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001363 DataType::QAsymmU8,
1364 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001365 };
1366
1367 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1368 "Reference minimum: input 0 is not a supported type.");
1369
1370 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1371 "Reference minimum: input 1 is not a supported type.");
1372
1373 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1374 "Reference minimum: output is not a supported type.");
1375
1376 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1377 "Reference minimum: input 0 and Input 1 types are mismatched");
1378
1379 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1380 "Reference minimum: input and output types are mismatched");
1381
1382 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1383 "Reference minimum: shapes are not suitable for implicit broadcast.");
1384
1385 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001386}
1387
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001388bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1389 const TensorInfo& input1,
1390 const TensorInfo& output,
1391 Optional<std::string&> reasonIfUnsupported) const
1392{
Sadik Armagan2999a022019-04-09 14:20:12 +01001393 bool supported = true;
1394
Keith Davis67e6c542020-02-19 10:08:33 +00001395 std::array<DataType,6> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001396 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001397 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001398 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001399 DataType::QAsymmU8,
Keith Davis67e6c542020-02-19 10:08:33 +00001400 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001401 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001402 };
1403
1404 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1405 "Reference multiplication: input 0 is not a supported type.");
1406
1407 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1408 "Reference multiplication: input 1 is not a supported type.");
1409
1410 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1411 "Reference multiplication: output is not a supported type.");
1412
1413 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1414 "Reference multiplication: input 0 and Input 1 types are mismatched");
1415
1416 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1417 "Reference multiplication: input and output types are mismatched");
1418
1419 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1420 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1421
1422 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001423}
1424
1425bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1426 const TensorInfo& output,
1427 const NormalizationDescriptor& descriptor,
1428 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001429{
Jan Eilers8eb25602020-03-09 12:13:48 +00001430 IgnoreUnused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001431
1432 // Define supported types
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001433 std::array<DataType, 5> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001434 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001435 DataType::BFloat16,
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001436 DataType::Float16,
1437 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001438 DataType::QAsymmU8,
1439 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001440 };
1441
1442 bool supported = true;
1443
1444 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1445 "Reference normalization: input type not supported.");
1446
1447 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1448 "Reference normalization: output type not supported.");
1449
1450 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1451 "Reference normalization: input and output shapes have different "
1452 "num total elements.");
1453
1454 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001455}
1456
Derek Lamberti901ea112019-12-10 22:07:09 +00001457bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1458 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001459{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001460 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001461}
1462
1463bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1464 const TensorInfo& output,
1465 const PadDescriptor& descriptor,
1466 Optional<std::string&> reasonIfUnsupported) const
1467{
Jan Eilers8eb25602020-03-09 12:13:48 +00001468 IgnoreUnused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001469 bool supported = true;
1470
1471 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001472 std::array<DataType,5> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001473 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001474 DataType::BFloat16,
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001475 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001476 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001477 DataType::QAsymmU8,
1478 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001479 };
1480
1481 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1482 "Reference pad: input is not a supported type.");
1483
1484 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1485 "Reference pad: output is not a supported type.");
1486
1487 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1488 "Reference pad: input and output types are mismatched.");
1489
1490 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001491}
1492
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001493bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1494 const TensorInfo& output,
1495 const PermuteDescriptor& descriptor,
1496 Optional<std::string&> reasonIfUnsupported) const
1497{
Jan Eilers8eb25602020-03-09 12:13:48 +00001498 IgnoreUnused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001499 bool supported = true;
1500
1501 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001502 std::array<DataType, 5> supportedTypes =
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001503 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001504 DataType::BFloat16,
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001505 DataType::Float32,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001506 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001507 DataType::QAsymmU8,
1508 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001509 };
1510
1511 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1512 "Reference permute: input is not a supported type.");
1513
1514 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1515 "Reference permute: output is not a supported type.");
1516
1517 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1518 "Reference permute: input and output types are mismatched.");
1519
1520 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001521}
1522
1523bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1524 const TensorInfo& output,
1525 const Pooling2dDescriptor& descriptor,
1526 Optional<std::string&> reasonIfUnsupported) const
1527{
Jan Eilers8eb25602020-03-09 12:13:48 +00001528 IgnoreUnused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001529 bool supported = true;
1530
1531 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001532 std::array<DataType,6> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001533 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001534 DataType::BFloat16,
Teresa Charlina3b20472019-06-06 11:12:32 +01001535 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001536 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001537 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001538 DataType::QAsymmU8,
1539 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001540 };
1541
1542 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1543 "Reference poolind2d: input is not a supported type.");
1544
1545 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1546 "Reference poolind2d: output is not a supported type.");
1547
1548 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1549 "Reference poolind2d: input and output types are mismatched.");
1550
1551 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001552}
1553
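// Quantize accepts a float or already-quantized input and requires a quantized output with the
// same total number of elements. A minimal usage sketch (hypothetical shapes and quantization
// parameters, assuming the usual TensorInfo(shape, dataType, scale, offset) constructor):
//
//     armnn::TensorInfo from({1, 2, 2, 3}, armnn::DataType::Float32);
//     armnn::TensorInfo to({1, 2, 2, 3}, armnn::DataType::QAsymmU8, 0.05f, 128);
//     bool ok = armnn::RefLayerSupport().IsQuantizeSupported(from, to, armnn::EmptyOptional());
//
// would be expected to pass, while a mismatched element count or an unsupported output type fails.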
Derek Lamberti5f400d62019-03-25 15:41:58 +00001554bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1555 const TensorInfo& output,
1556 Optional<std::string&> reasonIfUnsupported) const
1557{
1558 bool supported = true;
1559
Finn Williamsfd271062019-12-04 14:27:27 +00001560 // Define supported input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001561 std::array<DataType,7> supportedInputTypes = {
1562 DataType::BFloat16,
Keith Davis5e51cd82020-01-29 16:52:59 +00001563 DataType::Float32,
Keith Davis3d8bc972020-02-04 09:31:47 +00001564 DataType::Float16,
Ryan OShea9add1202020-02-07 10:06:33 +00001565 DataType::QAsymmS8,
Keith Davis5e51cd82020-01-29 16:52:59 +00001566 DataType::QAsymmU8,
1567 DataType::QSymmS8,
1568 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001569 };
1570
1571 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1572 "Reference quantize: input type not supported.");
1573
1574 // Define supported output types.
Ryan OShea9add1202020-02-07 10:06:33 +00001575 std::array<DataType,4> supportedOutputTypes = {
Derek Lambertif90c56d2020-01-10 17:14:08 +00001576 DataType::QAsymmU8,
Ryan OShea9add1202020-02-07 10:06:33 +00001577 DataType::QAsymmS8,
Finn Williamsfd271062019-12-04 14:27:27 +00001578 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001579 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001580 };
1581 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1582 "Reference quantize: output type not supported.");
1583
1584 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1585 "Reference quantize: input and output shapes have different num total elements.");
1586
1587 return supported;
1588}
1589
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001590bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001591 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001592 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001593 Optional<std::string&> reasonIfUnsupported) const
1594{
Jan Eilers8eb25602020-03-09 12:13:48 +00001595 IgnoreUnused(output);
1596 IgnoreUnused(descriptor);
Nina Drozd2f2778f2019-05-27 10:37:05 +01001597 // Define supported types.
Keith Davis0c2eeac2020-02-11 16:51:50 +00001598 std::array<DataType,7> supportedTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001599 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001600 DataType::BFloat16,
Nina Drozd2f2778f2019-05-27 10:37:05 +01001601 DataType::Float32,
1602 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001603 DataType::Signed32,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001604 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001605 DataType::QAsymmU8,
1606 DataType::QSymmS16
Nina Drozd2f2778f2019-05-27 10:37:05 +01001607 };
Keith Davis0c2eeac2020-02-11 16:51:50 +00001608
Nina Drozd2f2778f2019-05-27 10:37:05 +01001609 return CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1610 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001611}
1612
1613bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
Sadik Armaganc625f002018-12-17 11:32:16 +00001614 const TensorInfo& output,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001615 Optional<std::string&> reasonIfUnsupported) const
1616{
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001617 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001618 std::array<DataType,5> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001619 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001620 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001621 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001622 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001623 DataType::QAsymmU8,
1624 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001625 };
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001626
1627 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1628 "Reference ResizeBilinear: input type not supported");
1629
1630 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1631 "Reference ResizeBilinear: output type not supported");
1632
1633 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1634 "Reference ResizeBilinear: input and output types not matching");
1635
1636 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001637}
1638
Teresa Charlin970f43b2019-07-01 13:51:07 +01001639bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1640 const TensorInfo& output,
1641 const ResizeDescriptor& descriptor,
1642 Optional<std::string&> reasonIfUnsupported) const
1643{
Jan Eilers8eb25602020-03-09 12:13:48 +00001644 IgnoreUnused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001645 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001646 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001647 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001648 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001649 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001650 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001651 DataType::QAsymmU8,
Keith Davis67e6c542020-02-19 10:08:33 +00001652 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001653 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001654 };
1655
1656 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1657 "Reference Resize: input type not supported");
1658
1659 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1660 "Reference Resize: output type not supported");
1661
1662 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1663 "Reference Resize: input and output types not matching");
1664
1665 return supported;
1666}
1667
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001668bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1669 const TensorInfo& output,
1670 Optional<std::string&> reasonIfUnsupported) const
1671{
josh minor4a3c6102020-01-06 16:40:46 -06001672 return IsElementwiseUnarySupported(input,
1673 output,
1674 ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
1675 reasonIfUnsupported);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001676}
1677
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001678bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1679 const TensorInfo& output,
1680 const SliceDescriptor& descriptor,
1681 Optional<std::string&> reasonIfUnsupported) const
1682{
Jan Eilers8eb25602020-03-09 12:13:48 +00001683 IgnoreUnused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001684 bool supported = true;
1685
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001686 std::array<DataType, 4> supportedTypes =
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001687 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001688 DataType::BFloat16,
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001689 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001690 DataType::QAsymmU8,
1691 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001692 };
1693
1694 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1695 "Reference Slice: input type not supported");
1696
1697 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1698 "Reference Slice: output type not supported");
1699
1700 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1701 "Reference Slice: input and output types are mismatched");
1702
1703 return supported;
1704}
1705
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001706bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1707 const TensorInfo& output,
1708 const SoftmaxDescriptor& descriptor,
1709 Optional<std::string&> reasonIfUnsupported) const
1710{
Jan Eilers8eb25602020-03-09 12:13:48 +00001711 IgnoreUnused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01001712 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001713 std::array<DataType,7> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01001714 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001715 DataType::BFloat16,
1716 DataType::Float32,
1717 DataType::Float16,
1718 DataType::QSymmS8,
1719 DataType::QAsymmS8,
1720 DataType::QAsymmU8,
1721 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01001722 };
1723
1724 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001725 "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001726
1727 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001728 "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001729
1730 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001731 "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01001732
1733 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001734}
1735
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001736bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1737 const TensorInfo& output,
1738 const SpaceToBatchNdDescriptor& descriptor,
1739 Optional<std::string&> reasonIfUnsupported) const
1740{
Jan Eilers8eb25602020-03-09 12:13:48 +00001741 IgnoreUnused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01001742 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001743 std::array<DataType,5> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01001744 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001745 DataType::BFloat16,
1746 DataType::Float32,
1747 DataType::Float16,
1748 DataType::QAsymmU8,
1749 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01001750 };
1751
1752 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1753 "Reference SpaceToBatchNd: input type not supported");
1754
1755 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1756 "Reference SpaceToBatchNd: output type not supported");
1757
1758 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1759 "Reference SpaceToBatchNd: input and output types are mismatched");
1760
1761 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001762}
1763
Keith Davisa57eccb2019-06-14 17:33:22 +01001764bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01001765 const TensorInfo& output,
1766 const SpaceToDepthDescriptor& descriptor,
1767 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01001768{
1769
Jan Eilers8eb25602020-03-09 12:13:48 +00001770 IgnoreUnused(descriptor);
Keith Davisa57eccb2019-06-14 17:33:22 +01001771 bool supported = true;
1772
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001773 std::array<DataType,5> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01001774 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001775 DataType::BFloat16,
Keith Davisa57eccb2019-06-14 17:33:22 +01001776 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001777 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001778 DataType::QAsymmU8,
1779 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01001780 };
1781
1782 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1783 "Reference SpaceToDepth: input type not supported");
1784
1785 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1786 "Reference SpaceToDepth: output type not supported");
1787
1788 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1789 "Reference SpaceToDepth: input and output types are mismatched");
1790
1791 return supported;
1792}
1793
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001794bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1795 const ViewsDescriptor& descriptor,
1796 Optional<std::string&> reasonIfUnsupported) const
1797{
Jan Eilers8eb25602020-03-09 12:13:48 +00001798 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001799 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001800 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001801 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001802 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001803 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001804 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001805 DataType::QAsymmU8,
1806 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001807 };
1808
1809 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1810 "Reference splitter: input type not supported");
1811
1812 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001813}
1814
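// Overload taking the individual output views of the split: the input must be of a supported
// type and every output view must match the input type.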
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001815bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1816 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1817 const ViewsDescriptor& descriptor,
1818 Optional<std::string&> reasonIfUnsupported) const
1819{
Jan Eilers8eb25602020-03-09 12:13:48 +00001820 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001821 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001822 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001823 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001824 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001825 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001826 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001827 DataType::QAsymmU8,
1828 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001829 };
1830
1831 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1832 "Reference splitter: output type not supported");
1833 for (const TensorInfo output : outputs)
1834 {
1835 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1836 "Reference splitter: input type not supported");
1837
1838 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1839 "Reference splitter: input and output types mismatched.");
1840 }
1841
1842 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001843}
1844
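// Stack joins its inputs along a new axis; every input pointer is asserted to be non-null, and
// each input must be of a supported type that matches the output type.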
Matthew Jackson81e601c2019-07-11 12:07:09 +01001845bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1846 const TensorInfo& output,
1847 const StackDescriptor& descriptor,
1848 Optional<std::string&> reasonIfUnsupported) const
1849{
Jan Eilers8eb25602020-03-09 12:13:48 +00001850 IgnoreUnused(descriptor);
Matthew Jackson81e601c2019-07-11 12:07:09 +01001851
1852 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001853 std::array<DataType,5> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01001854 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001855 DataType::BFloat16,
Matthew Jackson81e601c2019-07-11 12:07:09 +01001856 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01001857 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001858 DataType::QAsymmU8,
1859 DataType::QSymmS16
Matthew Jackson81e601c2019-07-11 12:07:09 +01001860 };
1861
1862 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1863 "Reference stack: output type not supported");
1864 for (const TensorInfo* input : inputs)
1865 {
1866 BOOST_ASSERT(input != nullptr);
1867 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
1868 "Reference stack: input type not supported");
1869
1870 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
1871 "Reference stack: input and output types mismatched.");
1872 }
1873
1874 return supported;
1875}
1876
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00001877bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
1878 const TensorInfo& output,
1879 const StridedSliceDescriptor& descriptor,
1880 Optional<std::string&> reasonIfUnsupported) const
1881{
Jan Eilers8eb25602020-03-09 12:13:48 +00001882 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001883 bool supported = true;
1884
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001885 std::array<DataType,4> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001886 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001887 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001888 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001889 DataType::QAsymmU8,
1890 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001891 };
1892
1893 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1894 "Reference StridedSlice: input type not supported");
1895
1896 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1897 "Reference StridedSlice: output type not supported");
1898
1899 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1900 "Reference StridedSlice: input and output types are mismatched");
1901
1902 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00001903}
1904
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001905bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
1906 const TensorInfo& input1,
1907 const TensorInfo& output,
1908 Optional<std::string&> reasonIfUnsupported) const
1909{
Sadik Armagan2999a022019-04-09 14:20:12 +01001910 bool supported = true;
1911
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001912 std::array<DataType,5> supportedTypes = {
1913 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001914 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001915 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001916 DataType::QAsymmU8,
1917 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001918 };
1919
1920 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1921 "Reference subtraction: input 0 is not a supported type.");
1922
1923 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1924 "Reference subtraction: input 1 is not a supported type.");
1925
1926 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1927 "Reference subtraction: output is not a supported type.");
1928
1929 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1930 "Reference subtraction: input 0 and Input 1 types are mismatched");
1931
1932 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1933 "Reference subtraction: input and output types are mismatched");
1934
1935 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1936 "Reference subtraction: shapes are not suitable for implicit broadcast.");
1937
1938 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001939}
1940
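// PReLU takes a learnable alpha tensor alongside the input; input, alpha and output must share a
// supported type, and alpha must be broadcast compatible with the input (for example, a
// hypothetical per-channel alpha of shape [1,1,1,C] against an NHWC input of shape [N,H,W,C]).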
Matteo Martincighab9e5252019-06-13 17:27:46 +01001941bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
1942 const TensorInfo& alpha,
1943 const TensorInfo& output,
1944 Optional<std::string&> reasonIfUnsupported) const
1945{
1946 bool supported = true;
1947
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001948 std::array<DataType, 5> supportedTypes
Matteo Martincighab9e5252019-06-13 17:27:46 +01001949 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001950 DataType::BFloat16,
Matteo Martincighab9e5252019-06-13 17:27:46 +01001951 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001952 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001953 DataType::QAsymmU8,
1954 DataType::QSymmS16
Matteo Martincighab9e5252019-06-13 17:27:46 +01001955 };
1956
1957 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1958 "PReLU: input is not a supported type.");
1959
1960 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
1961 "PReLU: alpha is not a supported type.");
1962
1963 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1964 "PReLU: output is not a supported type.");
1965
1966 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
1967 "PReLU: input, alpha and output types are mismatched");
1968
1969 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
1970 "PReLU: shapes are not suitable for implicit broadcast");
1971
1972 return supported;
1973}
1974
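// For TransposeConvolution2d with a QAsymmU8 input, the weights may be QAsymmU8, QSymmS8 or the
// deprecated QuantizedSymm8PerAxis type, so they are validated against a separate list rather
// than being required to match the input type exactly; for float inputs the weights must match
// the input type. Biases, when present, must be BFloat16, Float32, Float16 or Signed32.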
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001975bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
1976 const TensorInfo& output,
1977 const TransposeConvolution2dDescriptor& descriptor,
1978 const TensorInfo& weights,
1979 const Optional<TensorInfo>& biases,
1980 Optional<std::string&> reasonIfUnsupported) const
1981{
Jan Eilers8eb25602020-03-09 12:13:48 +00001982 IgnoreUnused(descriptor);
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001983 bool supported = true;
1984
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001985 std::array<DataType,5> supportedTypes =
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001986 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001987 DataType::BFloat16,
1988 DataType::Float32,
1989 DataType::Float16,
1990 DataType::QAsymmU8,
1991 DataType::QSymmS16
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001992 };
1993
1994 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1995 "Reference TransposeConvolution2d: input is not a supported type.");
1996
1997 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1998 "Reference TransposeConvolution2d: output is not a supported type.");
1999
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002000 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2001 "Reference TransposeConvolution2d: input and output types mismatched.");
2002
2004 const DataType inputType = input.GetDataType();
Derek Lambertif90c56d2020-01-10 17:14:08 +00002005 if (inputType == DataType::QAsymmU8)
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002006 {
Derek Lambertid466a542020-01-22 15:37:29 +00002007 ARMNN_NO_DEPRECATE_WARN_BEGIN
2008 std::array<DataType, 3> supportedWeightTypes =
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002009 {
Derek Lambertif90c56d2020-01-10 17:14:08 +00002010 DataType::QAsymmU8,
Derek Lambertid466a542020-01-22 15:37:29 +00002011 DataType::QSymmS8,
2012 DataType::QuantizedSymm8PerAxis //Deprecated
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002013 };
Derek Lambertid466a542020-01-22 15:37:29 +00002014 ARMNN_NO_DEPRECATE_WARN_END
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002015
2016 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
2017 "Reference TransposeConvolution2d: weights type not supported for "
2018 "quantized input.");
2019 }
2020 else
2021 {
2022 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
2023 "Reference TransposeConvolution2d: weights is not a supported type.");
2024
2025 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
2026 "Reference TransposeConvolution2d: input and weights types mismatched.");
2027 }
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002028
2029 if (biases.has_value())
2030 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002031 std::array<DataType,4> biasesSupportedTypes =
Aron Virginas-Tar651aafe2019-08-05 11:52:05 +01002032 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002033 DataType::BFloat16,
2034 DataType::Float32,
2035 DataType::Float16,
2036 DataType::Signed32
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002037 };
2038 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
2039 "Reference TransposeConvolution2d: biases is not a supported type.");
2040 }
2041
2042 return supported;
2043}
2044
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002045bool RefLayerSupport::IsTransposeSupported(const TensorInfo& input,
2046 const TensorInfo& output,
2047 const TransposeDescriptor& descriptor,
2048 Optional<std::string&> reasonIfUnsupported) const
2049{
Jan Eilers8eb25602020-03-09 12:13:48 +00002050 IgnoreUnused(descriptor);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002051 bool supported = true;
2052
2053 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002054 std::array<DataType, 5> supportedTypes =
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002055 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002056 DataType::BFloat16,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002057 DataType::Float32,
2058 DataType::Float16,
2059 DataType::QAsymmU8,
2060 DataType::QSymmS16
2061 };
2062
2063 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2064 "Reference transpose: input is not a supported type.");
2065
2066 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2067 "Reference transpose: output is not a supported type.");
2068
2069 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2070 "Reference transpose: input and output types are mismatched.");
2071
2072 return supported;
2073}
2074
arovir011c7c81b2018-10-08 11:34:28 +01002075} // namespace armnn