//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <LayerSupportCommon.hpp>
#include <backendsCommon/LayerSupportRules.hpp>

#include <vector>
#include <array>

namespace armnn
{

namespace
{

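// Dispatch helper: forwards to IsSupportedForDataTypeGeneric, routing Float32 and Uint8 to the
// supplied functors and every other data type to FalseFunc (i.e. unsupported).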
template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

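// Builds the standard error message used when a tensor has an unexpected number of dimensions.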
std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace

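// Abs is handled as an ElementwiseUnary operation, so this check simply forwards to it.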
bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    return IsElementwiseUnarySupported(input,
                                       output,
                                       ElementwiseUnaryDescriptor(UnaryOperation::Abs),
                                       reasonIfUnsupported);
}

bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
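    // Each CheckSupportRule call ANDs its result into 'supported'; when a rule fails, the
    // accompanying message is appended to reasonIfUnsupported.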
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

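    // Local rule: reports whether the requested activation function is one the reference backend implements.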
    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::Elu:
                case ActivationFunction::HardSwish:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and input 1 types are mismatched.");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched.");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo &input, const armnn::TensorInfo &output,
                                           const armnn::ArgMinMaxDescriptor &descriptor,
                                           armnn::Optional<std::string &> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeIs(output, DataType::Signed32), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    std::array<DataType, 8> supportedInputTypes =
    {
        DataType::Boolean,
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and input 1 types are mismatched");

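    // Comparison layers always produce a Boolean output, whatever the input type.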
    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    bool supported = true;
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        ARMNN_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertBf16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertBf16ToFp32 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
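    // Two generic dispatches: the first accepts only a Float16 input, the second only a Float32 output;
    // every other data type resolves to a 'false' functor.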
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvertFp32ToBf16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    supported &= CheckSupportRule(TypeIs(input, DataType::Float32), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: input type not supported");

    supported &= CheckSupportRule(TypeIs(output, DataType::BFloat16), reasonIfUnsupported,
                                  "Reference for ConvertFp32ToBf16 layer: output type not supported");

    return supported;
}

bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &FalseInputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseOutputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution2dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: output is not a supported type.");

    // For Convolution2d, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Convolution2d: input and output types mismatched.");
    }

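    // For 8-bit quantized inputs the weights may use any of the 8-bit quantized types (including the
    // deprecated per-axis symmetric type); otherwise the weights must match the input type.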
    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: biases is not a supported type.");
    }
    IgnoreUnused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 8> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    // Define supported types.
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

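    // Same weight-type rules as Convolution2d: quantized 8-bit inputs allow any 8-bit weight type,
    // otherwise the weights must match the input type.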
    const DataType inputType = input.GetDataType();
    if (IsQuantized8BitType(inputType))
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 4> supportedWeightTypes =
        {
            DataType::QAsymmS8,
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for "
                                      "quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,4> biasesSupportedTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }

    return supported;
}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedInputTypes = {
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,3> supportedOutputTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,5> supportedInputTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,7> supportedTypes = {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and input 1 types are mismatched.");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched.");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
                                       const TensorInfo& input1,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Equal),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFillSupported(const TensorInfo& input,
                                      const TensorInfo& output,
                                      const FillDescriptor& descriptor,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(output);

    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::Signed32
    };

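    // The Fill input carries the output shape, so it must be Signed32; the filled output may be any of the
    // types listed above.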
    supported &= CheckSupportRule(TypeIs(input, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Fill: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fill: output type not supported.");
    return supported;
}

bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(output);
    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: output type not supported.");

    return supported;
}

bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const TensorInfo& weights,
                                                const TensorInfo& biases,
                                                const FullyConnectedDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: output type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights type not supported.");

    // For FullyConnected, a BFloat16 input with a Float32 output is allowed as an optimization.
    if (input.GetDataType() == DataType::BFloat16)
    {
        if (output.GetDataType() != DataType::BFloat16 && output.GetDataType() != DataType::Float32)
        {
            reasonIfUnsupported.value() += "Output tensor type must be BFloat16 or Float32 for BFloat16 input.\n";
            supported = false;
        }
    }
    else
    {
        supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                      "Reference Fully Connected: input and output types mismatched.");
    }

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                  "Reference Fully Connected: input and weights types mismatched.");

    if (descriptor.m_BiasEnabled)
    {
        // Defined supported types for bias
        std::array<DataType, 5> supportedBiasTypes =
        {
            DataType::BFloat16,
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32,
            DataType::QAsymmS8
        };

        supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type not supported.");

        supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: bias and weight types mismatch.");

        supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type inferred from weights is incompatible.");

        supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(biases, 1U), reasonIfUnsupported,
                                      "Reference Fully Connected: bias must have 1 dimension.");
    }

    return supported;
}

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        const GatherDescriptor& descriptor,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,7> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

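    // Only axis 0 is accepted; any other axis is reported as unsupported.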
    if (descriptor.m_Axis != 0)
    {
        reasonIfUnsupported.value() += std::string("Reference Gather: axis not supported\n");
        supported &= false;
    }
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Greater),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Instance Normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference Instance Normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
                                                 const TensorInfo& output,
                                                 const L2NormalizationDescriptor& descriptor,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    // Define supported types
    std::array<DataType, 6> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference L2normalization: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference L2normalization: input and output shapes have different "
                                  "num total elements.");

    return supported;
}

bool RefLayerSupport::IsLogicalBinarySupported(const TensorInfo& input0,
                                               const TensorInfo& input1,
                                               const TensorInfo& output,
                                               const LogicalBinaryDescriptor& descriptor,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 1> supportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 0 type not supported");
    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalBinary: input 1 type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference LogicalBinary: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLogicalUnarySupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const ElementwiseUnaryDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 1> supportedTypes =
    {
        DataType::Boolean
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference LogicalUnary: input type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference LogicalUnary: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const LogSoftmaxDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);

    std::array<DataType, 3> supportedTypes =
    {
        DataType::BFloat16,
        DataType::Float32,
        DataType::Float16
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference LogSoftmax: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference LogSoftmax: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference LogSoftmax: input and output types do not match");

    return supported;
}

bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
                                      const TensorInfo& outputStateIn,
                                      const TensorInfo& cellStateIn,
                                      const TensorInfo& scratchBuffer,
                                      const TensorInfo& outputStateOut,
                                      const TensorInfo& cellStateOut,
                                      const TensorInfo& output,
                                      const LstmDescriptor& descriptor,
                                      const LstmInputParamsInfo& paramsInfo,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    IgnoreUnused(descriptor);
    IgnoreUnused(paramsInfo);

    bool supported = true;

Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001198 std::array<DataType,3> supportedTypes = {
1199 DataType::BFloat16,
Conor Kennedyb9971c92019-05-07 07:14:23 +01001200 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001201 DataType::QSymmS16
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001202 };
1203
Jan Eilersd01a83c2019-07-03 18:20:40 +01001204 // check inputs and outputs
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001205 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1206 "Reference Lstm: input is not a supported type.");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001207 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1208 "Reference Lstm: input and outputStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001209 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1210 "Reference Lstm: input and cellStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001211 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1212 "Reference Lstm: input and scratchBuffer types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001213 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1214 "Reference Lstm: input and outputStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001215 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1216 "Reference Lstm: input and cellStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001217 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1218 "Reference Lstm: input and output types are mismatched");
Jan Eilersd01a83c2019-07-03 18:20:40 +01001219 // check layer parameters
Francis Murtaghbb590b42019-08-14 09:51:36 +01001220 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001221 "Reference Lstm: input and InputToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001222 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001223 "Reference Lstm: input and InputToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001224 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001225 "Reference Lstm: input and InputToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001226 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001227 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001228 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001229 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001230 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001231 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001232 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001233 "Reference Lstm: input and ForgetGateBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001234 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001235 "Reference Lstm: input and CellBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001236 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001237 "Reference Lstm: input and OutputGateBias types are mismatched");
1238 if (!descriptor.m_CifgEnabled)
1239 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001240 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001241 "Reference Lstm: input and InputToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001242 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001243 reasonIfUnsupported,
1244 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001245 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001246 "Reference Lstm: input and InputGateBias types are mismatched");
1247 if (descriptor.m_PeepholeEnabled)
1248 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001249 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001250 reasonIfUnsupported,
1251 "Reference Lstm: input and CellToInputWeights types are mismatched");
1252 }
1253 }
1254 if (descriptor.m_PeepholeEnabled)
1255 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001256 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001257 "Reference Lstm: input and CellToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001258 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001259 "Reference Lstm: input and CellToOutputWeights types are mismatched");
1260 }
1261 if (descriptor.m_ProjectionEnabled)
1262 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001263 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001264 "Reference Lstm: input and mProjectionWeights types are mismatched");
1265 if (paramsInfo.m_ProjectionBias != nullptr)
1266 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001267 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001268 "Reference Lstm: input and ProjectionBias types are mismatched");
1269 }
1270 }
1271 if (descriptor.m_LayerNormEnabled)
1272 {
1273 if (!descriptor.m_CifgEnabled)
1274 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001275 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001276 reasonIfUnsupported,
1277 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1278 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001279 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001280 reasonIfUnsupported,
1281 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001282 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001283 reasonIfUnsupported,
1284 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001285 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001286 reasonIfUnsupported,
1287 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1288 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001289
1290 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001291}
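// Note (sketch based on the flags used above): the descriptor controls which
// parameter groups are type-checked against the input. Assuming a caller
// configures the layer as follows,
//
//     LstmDescriptor desc;
//     desc.m_CifgEnabled       = false; // InputToInput/RecurrentToInput weights and InputGateBias checked
//     desc.m_PeepholeEnabled   = true;  // CellToForget/CellToOutput (and CellToInput, since CIFG is off) checked
//     desc.m_ProjectionEnabled = false; // projection weights and bias not checked
//     desc.m_LayerNormEnabled  = false; // layer-norm weights not checked
//
// then every checked weight and bias in paramsInfo must share the input's data
// type (Float32, BFloat16 or QSymmS16 here).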
1292
saoste012df12b32018-11-28 16:57:20 +00001293bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1294 const TensorInfo& input1,
1295 const TensorInfo& output,
1296 Optional<std::string&> reasonIfUnsupported) const
1297{
Sadik Armagan2999a022019-04-09 14:20:12 +01001298 bool supported = true;
1299
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001300 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001301 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001302 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001303 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001304 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001305 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001306 DataType::QSymmS16,
1307 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001308 };
1309
1310 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1311 "Reference maximum: input 0 is not a supported type.");
1312
1313 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1314 "Reference maximum: input 1 is not a supported type.");
1315
1316 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1317 "Reference maximum: output is not a supported type.");
1318
1319 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1320 "Reference maximum: input 0 and Input 1 types are mismatched");
1321
1322 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1323 "Reference maximum: input and output types are mismatched");
1324
1325 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1326 "Reference maximum: shapes are not suitable for implicit broadcast.");
1327
1328 return supported;
saoste012df12b32018-11-28 16:57:20 +00001329}
1330
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001331bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1332 const TensorInfo& output,
1333 const MeanDescriptor& descriptor,
1334 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001335{
James Conroy4d1ff582019-06-10 17:06:39 +01001336 bool supported = true;
1337 std::string meanLayerStr = "Mean";
1338 std::string outputTensorStr = "output";
1339
Sadik Armagan303980c2020-04-17 12:45:14 +01001340 std::array<DataType,6> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001341 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001342 DataType::BFloat16,
James Conroy4d1ff582019-06-10 17:06:39 +01001343 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001344 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001345 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001346 DataType::QAsymmU8,
1347 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001348 };
1349
1350 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1351 "Reference Mean: input type not supported.");
1352
1353 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1354 "Reference Mean: input and output types are mismatched");
1355
1356 if (descriptor.m_KeepDims)
1357 {
1358 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1359 reasonIfUnsupported,
1360 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1361 output.GetNumDimensions(),
1362 meanLayerStr, outputTensorStr).data());
1363 }
1364 else if (descriptor.m_Axis.empty())
1365 {
1366 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1367 reasonIfUnsupported,
1368 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1369 meanLayerStr, outputTensorStr).data());
1370 }
1371 else
1372 {
Matthew Sloyan171214c2020-09-09 09:07:37 +01001373 auto outputDim = input.GetNumDimensions() - armnn::numeric_cast<unsigned int>(descriptor.m_Axis.size());
James Conroy4d1ff582019-06-10 17:06:39 +01001374
1375 if (outputDim > 0)
1376 {
1377 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1378 reasonIfUnsupported,
1379 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1380 meanLayerStr, outputTensorStr).data());
1381 }
1382 else
1383 {
1384 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1385 reasonIfUnsupported,
1386 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1387 meanLayerStr, outputTensorStr).data());
1388 }
1389 }
1390
1391 return supported;
narpra0132b90462018-09-13 11:07:48 +01001392}
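// Worked example (assumed shapes) of the dimension rules above: with KeepDims
// disabled, the expected output rank is the input rank minus the number of
// reduction axes, falling back to a rank of 1 when nothing would remain.
//
//     TensorInfo input (TensorShape({ 1, 3, 4, 4 }), DataType::Float32);
//     TensorInfo output(TensorShape({ 1, 3 }), DataType::Float32); // 4 dims - 2 axes = 2 dims
//     MeanDescriptor descriptor({ 2u, 3u }, /*keepDims=*/false);
//     std::string reason;
//     bool ok = RefLayerSupport().IsMeanSupported(input, output, descriptor,
//                                                 Optional<std::string&>(reason));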
1393
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001394bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
Nikhil Raj8599a412018-11-19 14:51:07 +00001395 const TensorInfo& output,
Jim Flynne242f2d2019-05-22 14:24:13 +01001396 const MergerDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001397 Optional<std::string&> reasonIfUnsupported) const
1398{
Jim Flynne242f2d2019-05-22 14:24:13 +01001399 return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001400}
1401
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001402bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1403 const TensorInfo &output,
1404 Optional<std::string &> reasonIfUnsupported) const
1405{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001406 bool supported = true;
1407
Sadik Armagan303980c2020-04-17 12:45:14 +01001408 std::array<DataType,7> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001409 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001410 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001411 DataType::Float32,
1412 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001413 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001414 DataType::QAsymmU8,
1415 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001416 DataType::Boolean
1417 };
1418
1419 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1420 "Reference MemCopy: input type not supported");
1421
1422 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1423 "Reference MemCopy: output type not supported");
1424
1425 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1426 "Reference MemCopy: input and output types are mismatched");
1427
1428 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001429}
1430
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001431bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1432 const TensorInfo& input1,
1433 const TensorInfo& output,
1434 Optional<std::string&> reasonIfUnsupported) const
1435{
Sadik Armagan2999a022019-04-09 14:20:12 +01001436 bool supported = true;
1437
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001438 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001439 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001440 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001441 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001442 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001443 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001444 DataType::QSymmS16,
1445 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001446 };
1447
1448 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1449 "Reference minimum: input 0 is not a supported type.");
1450
1451 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1452 "Reference minimum: input 1 is not a supported type.");
1453
1454 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1455 "Reference minimum: output is not a supported type.");
1456
1457 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1458 "Reference minimum: input 0 and Input 1 types are mismatched");
1459
1460 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1461 "Reference minimum: input and output types are mismatched");
1462
1463 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1464 "Reference minimum: shapes are not suitable for implicit broadcast.");
1465
1466 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001467}
1468
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001469bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1470 const TensorInfo& input1,
1471 const TensorInfo& output,
1472 Optional<std::string&> reasonIfUnsupported) const
1473{
Sadik Armagan2999a022019-04-09 14:20:12 +01001474 bool supported = true;
1475
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001476 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001477 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01001478 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001479 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001480 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001481 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01001482 DataType::QSymmS16,
1483 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01001484 };
1485
1486 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1487 "Reference multiplication: input 0 is not a supported type.");
1488
1489 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1490 "Reference multiplication: input 1 is not a supported type.");
1491
1492 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1493 "Reference multiplication: output is not a supported type.");
1494
1495 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1496 "Reference multiplication: input 0 and Input 1 types are mismatched");
1497
1498 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1499 "Reference multiplication: input and output types are mismatched");
1500
1501 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1502 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1503
1504 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001505}
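// Broadcast sketch (assumed shapes): the implicit-broadcast rule accepts inputs
// whose dimensions either match or are 1, e.g. multiplying a per-channel scale
// into an NHWC tensor.
//
//     TensorInfo input0(TensorShape({ 1, 8, 8, 16 }), DataType::Float32);
//     TensorInfo input1(TensorShape({ 1, 1, 1, 16 }), DataType::Float32);
//     TensorInfo output(TensorShape({ 1, 8, 8, 16 }), DataType::Float32);
//     std::string reason;
//     bool ok = RefLayerSupport().IsMultiplicationSupported(input0, input1, output,
//                                                           Optional<std::string&>(reason));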
1506
1507bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1508 const TensorInfo& output,
1509 const NormalizationDescriptor& descriptor,
1510 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001511{
Jan Eilers8eb25602020-03-09 12:13:48 +00001512 IgnoreUnused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001513
1514 // Define supported types
Sadik Armagan303980c2020-04-17 12:45:14 +01001515 std::array<DataType, 6> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001516 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001517 DataType::BFloat16,
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001518 DataType::Float16,
1519 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001520 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001521 DataType::QAsymmU8,
1522 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001523 };
1524
1525 bool supported = true;
1526
1527 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1528 "Reference normalization: input type not supported.");
1529
1530 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1531 "Reference normalization: output type not supported.");
1532
1533 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1534 "Reference normalization: input and output shapes have different "
1535 "num total elements.");
1536
1537 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001538}
1539
Derek Lamberti901ea112019-12-10 22:07:09 +00001540bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1541 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001542{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001543 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001544}
1545
1546bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1547 const TensorInfo& output,
1548 const PadDescriptor& descriptor,
1549 Optional<std::string&> reasonIfUnsupported) const
1550{
Jan Eilers8eb25602020-03-09 12:13:48 +00001551 IgnoreUnused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001552 bool supported = true;
1553
1554 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001555 std::array<DataType,6> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001556 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001557 DataType::BFloat16,
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001558 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001559 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001560 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001561 DataType::QAsymmU8,
1562 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001563 };
1564
1565 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1566 "Reference pad: input is not a supported type.");
1567
1568 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1569 "Reference pad: output is not a supported type.");
1570
1571 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1572 "Reference pad: input and output types are mismatched.");
1573
1574 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001575}
1576
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001577bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1578 const TensorInfo& output,
1579 const PermuteDescriptor& descriptor,
1580 Optional<std::string&> reasonIfUnsupported) const
1581{
Jan Eilers8eb25602020-03-09 12:13:48 +00001582 IgnoreUnused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001583 bool supported = true;
1584
1585 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01001586 std::array<DataType, 6> supportedTypes =
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001587 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001588 DataType::BFloat16,
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001589 DataType::Float32,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001590 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001591 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001592 DataType::QAsymmU8,
1593 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001594 };
1595
1596 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1597 "Reference permute: input is not a supported type.");
1598
1599 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1600 "Reference permute: output is not a supported type.");
1601
1602 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1603 "Reference permute: input and output types are mismatched.");
1604
1605 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001606}
1607
1608bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1609 const TensorInfo& output,
1610 const Pooling2dDescriptor& descriptor,
1611 Optional<std::string&> reasonIfUnsupported) const
1612{
Jan Eilers8eb25602020-03-09 12:13:48 +00001613 IgnoreUnused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001614 bool supported = true;
1615
1616 // Define supported output and inputs types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001617 std::array<DataType,6> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001618 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001619 DataType::BFloat16,
Teresa Charlina3b20472019-06-06 11:12:32 +01001620 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001621 DataType::Float16,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001622 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001623 DataType::QAsymmU8,
1624 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001625 };
1626
1627 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1628 "Reference poolind2d: input is not a supported type.");
1629
1630 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1631 "Reference poolind2d: output is not a supported type.");
1632
1633 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1634 "Reference poolind2d: input and output types are mismatched.");
1635
1636 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001637}
1638
James Conroy4f1f8992020-04-29 20:01:10 +01001639bool RefLayerSupport::IsQLstmSupported(const TensorInfo& input,
1640 const TensorInfo& previousOutputIn,
1641 const TensorInfo& previousCellStateIn,
1642 const TensorInfo& outputStateOut,
1643 const TensorInfo& cellStateOut,
1644 const TensorInfo& output,
1645 const QLstmDescriptor& descriptor,
1646 const LstmInputParamsInfo& paramsInfo,
1647 Optional<std::string&> reasonIfUnsupported) const
1648{
1649 IgnoreUnused(input);
1650 IgnoreUnused(previousOutputIn);
1651 IgnoreUnused(previousCellStateIn);
1652 IgnoreUnused(outputStateOut);
1653 IgnoreUnused(cellStateOut);
1654 IgnoreUnused(output);
1655 IgnoreUnused(descriptor);
1656 IgnoreUnused(paramsInfo);
1657
1658 IgnoreUnused(reasonIfUnsupported);
1659
1660 return true;
1661}
1662
Derek Lamberti5f400d62019-03-25 15:41:58 +00001663bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1664 const TensorInfo& output,
1665 Optional<std::string&> reasonIfUnsupported) const
1666{
1667 bool supported = true;
1668
Finn Williamsfd271062019-12-04 14:27:27 +00001669 // Define supported input types.
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001670 std::array<DataType,7> supportedInputTypes = {
1671 DataType::BFloat16,
Keith Davis5e51cd82020-01-29 16:52:59 +00001672 DataType::Float32,
Keith Davis3d8bc972020-02-04 09:31:47 +00001673 DataType::Float16,
Ryan OShea9add1202020-02-07 10:06:33 +00001674 DataType::QAsymmS8,
Keith Davis5e51cd82020-01-29 16:52:59 +00001675 DataType::QAsymmU8,
1676 DataType::QSymmS8,
1677 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001678 };
1679
1680 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1681 "Reference quantize: input type not supported.");
1682
1683 // Define supported output types.
Ryan OShea9add1202020-02-07 10:06:33 +00001684 std::array<DataType,4> supportedOutputTypes = {
Ryan OShea9add1202020-02-07 10:06:33 +00001685 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001686 DataType::QAsymmU8,
Finn Williamsfd271062019-12-04 14:27:27 +00001687 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001688 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001689 };
1690 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1691 "Reference quantize: output type not supported.");
1692
1693 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1694 "Reference quantize: input and output shapes have different num total elements.");
1695
1696 return supported;
1697}
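// Example (assumed quantization values): Quantize validates the input and
// output against separate type lists, so a float input legitimately pairs with
// a quantized output.
//
//     TensorInfo input (TensorShape({ 1, 128 }), DataType::Float32);
//     TensorInfo output(TensorShape({ 1, 128 }), DataType::QAsymmU8,
//                       /*quantizationScale=*/0.05f, /*quantizationOffset=*/128);
//     std::string reason;
//     bool ok = RefLayerSupport().IsQuantizeSupported(input, output,
//                                                     Optional<std::string&>(reason));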
1698
Finn Williams2605b232020-06-10 15:53:46 +01001699bool RefLayerSupport::IsRankSupported(const TensorInfo& input,
1700 const TensorInfo& output,
1701 Optional<std::string&> reasonIfUnsupported) const
1702{
1703 IgnoreUnused(input);
1704 // Define supported output types.
1705 std::array<DataType,1> supportedOutputTypes =
1706 {
1707 DataType::Signed32,
1708 };
1709
1710 return CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1711 "Reference rank: input type not supported.");
1712}
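// Example (assumed shapes): Rank accepts any input type and only requires a
// Signed32 output to hold the rank value; the output shape itself is not
// checked here.
//
//     TensorInfo input (TensorShape({ 2, 3, 4 }), DataType::QAsymmU8);
//     TensorInfo output(TensorShape({ 1 }), DataType::Signed32);
//     std::string reason;
//     bool ok = RefLayerSupport().IsRankSupported(input, output,
//                                                 Optional<std::string&>(reason));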
1713
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001714bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001715 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001716 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001717 Optional<std::string&> reasonIfUnsupported) const
1718{
Jan Eilers8eb25602020-03-09 12:13:48 +00001719 IgnoreUnused(output);
1720 IgnoreUnused(descriptor);
Nina Drozd2f2778f2019-05-27 10:37:05 +01001721 // Define supported output types.
Keith Davis0c2eeac2020-02-11 16:51:50 +00001722 std::array<DataType,7> supportedOutputTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001723 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001724 DataType::BFloat16,
Nina Drozd2f2778f2019-05-27 10:37:05 +01001725 DataType::Float32,
1726 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001727 DataType::Signed32,
Keith Davis0c2eeac2020-02-11 16:51:50 +00001728 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001729 DataType::QAsymmU8,
1730 DataType::QSymmS16
Nina Drozd2f2778f2019-05-27 10:37:05 +01001731 };
Keith Davis0c2eeac2020-02-11 16:51:50 +00001732
Nina Drozd2f2778f2019-05-27 10:37:05 +01001733 return CheckSupportRule(TypeAnyOf(input, supportedOutputTypes), reasonIfUnsupported,
1734 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001735}
1736
1737bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
Sadik Armaganc625f002018-12-17 11:32:16 +00001738 const TensorInfo& output,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001739 Optional<std::string&> reasonIfUnsupported) const
1740{
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001741 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001742 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001743 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001744 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001745 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001746 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001747 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001748 DataType::QAsymmU8,
1749 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001750 };
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001751
1752 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1753 "Reference ResizeBilinear: input type not supported");
1754
1755 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1756 "Reference ResizeBilinear: output type not supported");
1757
1758 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1759 "Reference ResizeBilinear: input and output types not matching");
1760
1761 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001762}
1763
Teresa Charlin970f43b2019-07-01 13:51:07 +01001764bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1765 const TensorInfo& output,
1766 const ResizeDescriptor& descriptor,
1767 Optional<std::string&> reasonIfUnsupported) const
1768{
Jan Eilers8eb25602020-03-09 12:13:48 +00001769 IgnoreUnused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001770 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001771 std::array<DataType,6> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001772 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001773 DataType::BFloat16,
Teresa Charlin970f43b2019-07-01 13:51:07 +01001774 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001775 DataType::Float16,
Keith Davis67e6c542020-02-19 10:08:33 +00001776 DataType::QAsymmS8,
Sadik Armagan303980c2020-04-17 12:45:14 +01001777 DataType::QAsymmU8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001778 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001779 };
1780
1781 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1782 "Reference Resize: input type not supported");
1783
1784 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1785 "Reference Resize: output type not supported");
1786
1787 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1788 "Reference Resize: input and output types not matching");
1789
1790 return supported;
1791}
1792
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001793bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1794 const TensorInfo& output,
1795 Optional<std::string&> reasonIfUnsupported) const
1796{
josh minor4a3c6102020-01-06 16:40:46 -06001797 return IsElementwiseUnarySupported(input,
1798 output,
1799 ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
1800 reasonIfUnsupported);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001801}
1802
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001803bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1804 const TensorInfo& output,
1805 const SliceDescriptor& descriptor,
1806 Optional<std::string&> reasonIfUnsupported) const
1807{
Jan Eilers8eb25602020-03-09 12:13:48 +00001808 IgnoreUnused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001809 bool supported = true;
1810
Sadik Armagan303980c2020-04-17 12:45:14 +01001811 std::array<DataType, 5> supportedTypes =
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001812 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001813 DataType::BFloat16,
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001814 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01001815 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001816 DataType::QAsymmU8,
1817 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001818 };
1819
1820 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1821 "Reference Slice: input type not supported");
1822
1823 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1824 "Reference Slice: output type not supported");
1825
1826 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1827 "Reference Slice: input and output types are mismatched");
1828
1829 return supported;
1830}
1831
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001832bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1833 const TensorInfo& output,
1834 const SoftmaxDescriptor& descriptor,
1835 Optional<std::string&> reasonIfUnsupported) const
1836{
Jan Eilers8eb25602020-03-09 12:13:48 +00001837 IgnoreUnused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01001838 bool supported = true;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001839 std::array<DataType,7> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01001840 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001841 DataType::BFloat16,
1842 DataType::Float32,
1843 DataType::Float16,
1844 DataType::QSymmS8,
1845 DataType::QAsymmS8,
1846 DataType::QAsymmU8,
1847 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01001848 };
1849
1850 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001851 "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001852
1853 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001854 "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001855
1856 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001857 "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01001858
1859 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001860}
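// Example (assumed quantization parameters): Softmax also accepts the signed
// and unsigned 8-bit quantized types listed above.
//
//     TensorInfo input (TensorShape({ 1, 10 }), DataType::QAsymmS8, 0.1f, 0);
//     TensorInfo output(TensorShape({ 1, 10 }), DataType::QAsymmS8, 1.0f / 256.0f, -128);
//     SoftmaxDescriptor descriptor;
//     descriptor.m_Beta = 1.0f;
//     std::string reason;
//     bool ok = RefLayerSupport().IsSoftmaxSupported(input, output, descriptor,
//                                                    Optional<std::string&>(reason));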
1861
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001862bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1863 const TensorInfo& output,
1864 const SpaceToBatchNdDescriptor& descriptor,
1865 Optional<std::string&> reasonIfUnsupported) const
1866{
Jan Eilers8eb25602020-03-09 12:13:48 +00001867 IgnoreUnused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01001868 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001869 std::array<DataType,6> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01001870 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001871 DataType::BFloat16,
1872 DataType::Float32,
1873 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001874 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001875 DataType::QAsymmU8,
1876 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01001877 };
1878
1879 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1880 "Reference SpaceToBatchNd: input type not supported");
1881
1882 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1883 "Reference SpaceToBatchNd: output type not supported");
1884
1885 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1886 "Reference SpaceToBatchNd: input and output types are mismatched");
1887
1888 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001889}
1890
Keith Davisa57eccb2019-06-14 17:33:22 +01001891bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01001892 const TensorInfo& output,
1893 const SpaceToDepthDescriptor& descriptor,
1894 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01001895{
1896
Jan Eilers8eb25602020-03-09 12:13:48 +00001897 IgnoreUnused(descriptor);
Keith Davisa57eccb2019-06-14 17:33:22 +01001898 bool supported = true;
1899
Sadik Armagan303980c2020-04-17 12:45:14 +01001900 std::array<DataType,6> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01001901 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001902 DataType::BFloat16,
Keith Davisa57eccb2019-06-14 17:33:22 +01001903 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001904 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001905 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001906 DataType::QAsymmU8,
1907 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01001908 };
1909
1910 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1911 "Reference SpaceToDepth: input type not supported");
1912
1913 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1914 "Reference SpaceToDepth: output type not supported");
1915
1916 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1917 "Reference SpaceToDepth: input and output types are mismatched");
1918
1919 return supported;
1920}
1921
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001922bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1923 const ViewsDescriptor& descriptor,
1924 Optional<std::string&> reasonIfUnsupported) const
1925{
Jan Eilers8eb25602020-03-09 12:13:48 +00001926 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001927 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001928 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001929 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001930 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001931 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001932 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001933 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001934 DataType::QAsymmU8,
1935 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001936 };
1937
1938 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1939 "Reference splitter: input type not supported");
1940
1941 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001942}
1943
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001944bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1945 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1946 const ViewsDescriptor& descriptor,
1947 Optional<std::string&> reasonIfUnsupported) const
1948{
Jan Eilers8eb25602020-03-09 12:13:48 +00001949 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001950 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001951 std::array<DataType,6> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001952 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001953 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001954 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001955 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001956 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001957 DataType::QAsymmU8,
1958 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001959 };
1960
1961 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1962 "Reference splitter: output type not supported");
Derek Lambertieac4adb2020-08-25 13:05:59 +01001963 for (const TensorInfo& output : outputs)
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001964 {
1965 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1966 "Reference splitter: output type not supported");
1967
1968 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1969 "Reference splitter: input and output types mismatched.");
1970 }
1971
1972 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001973}
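// Example (assumed shapes and view setup): the multi-output overload validates
// every view produced by the split against the input type.
//
//     TensorInfo input(TensorShape({ 4, 8 }), DataType::Float32);
//     TensorInfo out0 (TensorShape({ 2, 8 }), DataType::Float32);
//     TensorInfo out1 (TensorShape({ 2, 8 }), DataType::Float32);
//     std::vector<std::reference_wrapper<TensorInfo>> outputs { std::ref(out0), std::ref(out1) };
//     ViewsDescriptor descriptor(/*numViews=*/2, /*numDimensions=*/2);
//     std::string reason;
//     bool ok = RefLayerSupport().IsSplitterSupported(input, outputs, descriptor,
//                                                     Optional<std::string&>(reason));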
1974
Matthew Jackson81e601c2019-07-11 12:07:09 +01001975bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1976 const TensorInfo& output,
1977 const StackDescriptor& descriptor,
1978 Optional<std::string&> reasonIfUnsupported) const
1979{
Jan Eilers8eb25602020-03-09 12:13:48 +00001980 IgnoreUnused(descriptor);
Matthew Jackson81e601c2019-07-11 12:07:09 +01001981
1982 bool supported = true;
Sadik Armagan303980c2020-04-17 12:45:14 +01001983 std::array<DataType,6> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01001984 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00001985 DataType::BFloat16,
Matthew Jackson81e601c2019-07-11 12:07:09 +01001986 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01001987 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01001988 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001989 DataType::QAsymmU8,
1990 DataType::QSymmS16
Matthew Jackson81e601c2019-07-11 12:07:09 +01001991 };
1992
1993 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1994 "Reference stack: output type not supported");
1995 for (const TensorInfo* input : inputs)
1996 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001997 ARMNN_ASSERT(input != nullptr);
Matthew Jackson81e601c2019-07-11 12:07:09 +01001998 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
1999 "Reference stack: input type not supported");
2000
2001 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
2002 "Reference stack: input and output types mismatched.");
2003 }
2004
2005 return supported;
2006}
2007
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002008bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
2009 const TensorInfo& output,
2010 const StridedSliceDescriptor& descriptor,
2011 Optional<std::string&> reasonIfUnsupported) const
2012{
Jan Eilers8eb25602020-03-09 12:13:48 +00002013 IgnoreUnused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002014 bool supported = true;
2015
Sadik Armagan303980c2020-04-17 12:45:14 +01002016 std::array<DataType,5> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002017 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002018 DataType::BFloat16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002019 DataType::Float32,
Sadik Armagan303980c2020-04-17 12:45:14 +01002020 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002021 DataType::QAsymmU8,
2022 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01002023 };
2024
2025 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2026 "Reference StridedSlice: input type not supported");
2027
2028 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2029 "Reference StridedSlice: output type not supported");
2030
2031 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2032 "Reference StridedSlice: input and output types are mismatched");
2033
2034 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00002035}
2036
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002037bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
2038 const TensorInfo& input1,
2039 const TensorInfo& output,
2040 Optional<std::string&> reasonIfUnsupported) const
2041{
Sadik Armagan2999a022019-04-09 14:20:12 +01002042 bool supported = true;
2043
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002044 std::array<DataType,7> supportedTypes = {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002045 DataType::BFloat16,
Sadik Armagan2999a022019-04-09 14:20:12 +01002046 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002047 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002048 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002049 DataType::QAsymmU8,
Teresa Charlinecb6b8e2020-05-22 18:08:23 +01002050 DataType::QSymmS16,
2051 DataType::Signed32
Sadik Armagan2999a022019-04-09 14:20:12 +01002052 };
2053
2054 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
2055 "Reference subtraction: input 0 is not a supported type.");
2056
2057 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
2058 "Reference subtraction: input 1 is not a supported type.");
2059
2060 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2061 "Reference subtraction: output is not a supported type.");
2062
2063 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
2064 "Reference subtraction: input 0 and Input 1 types are mismatched");
2065
2066 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
2067 "Reference subtraction: input and output types are mismatched");
2068
2069 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
2070 "Reference subtraction: shapes are not suitable for implicit broadcast.");
2071
2072 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01002073}
2074
Matteo Martincighab9e5252019-06-13 17:27:46 +01002075bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
2076 const TensorInfo& alpha,
2077 const TensorInfo& output,
2078 Optional<std::string&> reasonIfUnsupported) const
2079{
2080 bool supported = true;
2081
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002082 std::array<DataType, 6> supportedTypes
Matteo Martincighab9e5252019-06-13 17:27:46 +01002083 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002084 DataType::BFloat16,
Matteo Martincighab9e5252019-06-13 17:27:46 +01002085 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01002086 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002087 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002088 DataType::QAsymmU8,
Teresa Charlin3940d8b2020-05-29 16:47:23 +01002089 DataType::QSymmS16
Matteo Martincighab9e5252019-06-13 17:27:46 +01002090 };
2091
2092 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2093 "PReLU: input is not a supported type.");
2094
2095 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
2096 "PReLU: alpha is not a supported type.");
2097
2098 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2099 "PReLU: output is not a supported type.");
2100
2101 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
2102 "PReLU: input, alpha and output types are mismatched");
2103
2104 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
2105 "PReLU: shapes are not suitable for implicit broadcast");
2106
2107 return supported;
2108}
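// Example (assumed shapes): a typical PReLU use has a per-channel alpha that
// broadcasts against the input, which the broadcast rule above allows.
//
//     TensorInfo input (TensorShape({ 1, 16, 16, 8 }), DataType::Float32);
//     TensorInfo alpha (TensorShape({ 1, 1, 1, 8 }), DataType::Float32);
//     TensorInfo output(TensorShape({ 1, 16, 16, 8 }), DataType::Float32);
//     std::string reason;
//     bool ok = RefLayerSupport().IsPreluSupported(input, alpha, output,
//                                                  Optional<std::string&>(reason));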
2109
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002110bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
2111 const TensorInfo& output,
2112 const TransposeConvolution2dDescriptor& descriptor,
2113 const TensorInfo& weights,
2114 const Optional<TensorInfo>& biases,
2115 Optional<std::string&> reasonIfUnsupported) const
2116{
Jan Eilers8eb25602020-03-09 12:13:48 +00002117 IgnoreUnused(descriptor);
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002118 bool supported = true;
2119
Sadik Armagan303980c2020-04-17 12:45:14 +01002120 std::array<DataType,7> supportedTypes =
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002121 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002122 DataType::BFloat16,
2123 DataType::Float32,
2124 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002125 DataType::QAsymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002126 DataType::QAsymmU8,
Sadik Armagan303980c2020-04-17 12:45:14 +01002127 DataType::QSymmS8,
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002128 DataType::QSymmS16
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002129 };
2130
2131 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2132 "Reference TransposeConvolution2d: input is not a supported type.");
2133
2134 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2135 "Reference TransposeConvolution2d: output is not a supported type.");
2136
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002137 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2138 "Reference TransposeConvolution2d: input and output types mismatched.");
2139
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002140
2141 const DataType inputType = input.GetDataType();
Sadik Armagan303980c2020-04-17 12:45:14 +01002142 if (IsQuantized8BitType(inputType))
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002143 {
Derek Lambertid466a542020-01-22 15:37:29 +00002144 ARMNN_NO_DEPRECATE_WARN_BEGIN
Sadik Armagan303980c2020-04-17 12:45:14 +01002145 std::array<DataType, 4> supportedWeightTypes =
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002146 {
Sadik Armagan303980c2020-04-17 12:45:14 +01002147 DataType::QAsymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00002148 DataType::QAsymmU8,
Derek Lambertid466a542020-01-22 15:37:29 +00002149 DataType::QSymmS8,
2150 DataType::QuantizedSymm8PerAxis //Deprecated
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002151 };
Derek Lambertid466a542020-01-22 15:37:29 +00002152 ARMNN_NO_DEPRECATE_WARN_END
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00002153
2154 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
2155 "Reference TransposeConvolution2d: weights type not supported for "
2156 "quantized input.");
2157 }
2158 else
2159 {
2160 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
2161 "Reference TransposeConvolution2d: weights is not a supported type.");
2162
2163 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
2164 "Reference TransposeConvolution2d: input and weights types mismatched.");
2165 }
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002166
2167 if (biases.has_value())
2168 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002169 std::array<DataType,4> biasesSupportedTypes =
Aron Virginas-Tar651aafe2019-08-05 11:52:05 +01002170 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002171 DataType::BFloat16,
2172 DataType::Float32,
2173 DataType::Float16,
2174 DataType::Signed32
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01002175 };
2176 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
2177 "Reference TransposeConvolution2d: biases is not a supported type.");
2178 }
2179
2180 return supported;
2181}
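// Example (assumed quantization parameters): for a quantized input the weights
// must come from the quantized weight list above rather than match the input
// type exactly, e.g. QAsymmU8 activations with QSymmS8 weights and Signed32 bias.
//
//     TensorInfo input  (TensorShape({ 1, 8, 8, 16 }), DataType::QAsymmU8, 0.05f, 128);
//     TensorInfo output (TensorShape({ 1, 16, 16, 8 }), DataType::QAsymmU8, 0.05f, 128);
//     TensorInfo weights(TensorShape({ 8, 3, 3, 16 }), DataType::QSymmS8, 0.01f, 0);
//     TensorInfo biases (TensorShape({ 8 }), DataType::Signed32, 0.0005f, 0);
//     TransposeConvolution2dDescriptor descriptor; // strides and padding are not validated here
//     std::string reason;
//     bool ok = RefLayerSupport().IsTransposeConvolution2dSupported(
//         input, output, descriptor, weights, Optional<TensorInfo>(biases),
//         Optional<std::string&>(reason));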
2182
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002183bool RefLayerSupport::IsTransposeSupported(const TensorInfo& input,
2184 const TensorInfo& output,
2185 const TransposeDescriptor& descriptor,
2186 Optional<std::string&> reasonIfUnsupported) const
2187{
Jan Eilers8eb25602020-03-09 12:13:48 +00002188 IgnoreUnused(descriptor);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002189 bool supported = true;
2190
2191 // Define supported output and inputs types.
Sadik Armagan303980c2020-04-17 12:45:14 +01002192 std::array<DataType, 6> supportedTypes =
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002193 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +00002194 DataType::BFloat16,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002195 DataType::Float32,
2196 DataType::Float16,
Sadik Armagan303980c2020-04-17 12:45:14 +01002197 DataType::QAsymmS8,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002198 DataType::QAsymmU8,
2199 DataType::QSymmS16
2200 };
2201
2202 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
2203 "Reference transpose: input is not a supported type.");
2204
2205 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
2206 "Reference transpose: output is not a supported type.");
2207
2208 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
2209 "Reference transpose: input and output types are mismatched.");
2210
2211 return supported;
2212}
2213
arovir011c7c81b2018-10-08 11:34:28 +01002214} // namespace armnn