//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefLayerSupport.hpp"
#include "RefBackendId.hpp"

#include <armnn/Types.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/BackendRegistry.hpp>

#include <armnnUtils/DataLayoutIndexed.hpp>

#include <InternalTypes.hpp>
#include <LayerSupportCommon.hpp>

#include <backendsCommon/LayerSupportRules.hpp>

#include <boost/cast.hpp>
#include <boost/core/ignore_unused.hpp>

#include <vector>
#include <algorithm>
#include <array>

using namespace boost;

namespace armnn
{

namespace
{

template<typename Float32Func, typename Uint8Func, typename ... Params>
bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
                               DataType dataType,
                               Float32Func floatFuncPtr,
                               Uint8Func uint8FuncPtr,
                               Params&&... params)
{
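    // Dispatch via the generic helper, which selects a predicate by DataType; the unnamed
    // &FalseFunc slots reject the types this overload does not handle (Float16, Signed32, Boolean).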
    return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                         dataType,
                                         &FalseFunc<Params...>,
                                         floatFuncPtr,
                                         uint8FuncPtr,
                                         &FalseFunc<Params...>,
                                         &FalseFunc<Params...>,
                                         std::forward<Params>(params)...);
}

} // anonymous namespace

namespace
{

std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              std::string& layerStr,
                                              std::string& tensorName)
{
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) +
                           " dimensions but got " + std::to_string(actual) +
                           " dimensions instead, for the '" + tensorName + "' tensor.";

    return errorMsg;
}

} // anonymous namespace

bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
                                     Optional<std::string&> reasonIfUnsupported) const
{
    return IsElementwiseUnarySupported(input,
                                       output,
                                       ElementwiseUnaryDescriptor(UnaryOperation::Abs),
                                       reasonIfUnsupported);
}

bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            const ActivationDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,4> supportedTypes = {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference activation: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output types mismatched.");

    supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
                                  "Reference activation: input and output shapes are of different rank.");

    struct ActivationFunctionSupported : public Rule
    {
        ActivationFunctionSupported(const ActivationDescriptor& desc)
        {
            switch(desc.m_Function)
            {
                case ActivationFunction::Abs:
                case ActivationFunction::BoundedReLu:
                case ActivationFunction::LeakyReLu:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::Sigmoid:
                case ActivationFunction::SoftReLu:
                case ActivationFunction::Sqrt:
                case ActivationFunction::Square:
                case ActivationFunction::TanH:
                {
                    m_Res = true;
                    break;
                }
                default:
                {
                    m_Res = false;
                    break;
                }
            }
        }
    };

    // Function is supported
    supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
                                  "Reference activation: function not supported.");

    return supported;
}
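// A minimal usage sketch of the rule-based checks above. This is hypothetical caller code,
// shown for illustration only; the tensor shape and variable names are made up and are not
// part of this backend:
//
//     armnn::TensorInfo info({1, 2, 2, 3}, armnn::DataType::Float32);
//     armnn::ActivationDescriptor desc;
//     desc.m_Function = armnn::ActivationFunction::ReLu;
//     std::string reason;
//     bool ok = armnn::RefLayerSupport().IsActivationSupported(info, info, desc,
//                                                              armnn::Optional<std::string&>(reason));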

bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,5> supportedTypes = {
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference addition: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference addition: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference addition: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference addition: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo &input, const armnn::TensorInfo &output,
                                           const armnn::ArgMinMaxDescriptor &descriptor,
                                           armnn::Optional<std::string &> reasonIfUnsupported) const
{
    ignore_unused(descriptor);

    std::array<DataType, 4> supportedTypes =
    {
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference ArgMinMax: input is not a supported type.");
    supported &= CheckSupportRule(TypeIs(output, DataType::Signed32), reasonIfUnsupported,
                                  "Reference ArgMinMax: output type not supported");

    return supported;
}

bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
                                                    const TensorInfo& output,
                                                    const TensorInfo& mean,
                                                    const TensorInfo& variance,
                                                    const TensorInfo& beta,
                                                    const TensorInfo& gamma,
                                                    const BatchNormalizationDescriptor& descriptor,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);

    std::array<DataType, 4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference batch normalization: input and output types are mismatched");

    supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: mean is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: variance is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: beta is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
                                  "Reference batch normalization: gamma is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const BatchToSpaceNdDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);

    bool supported = true;

    std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
    std::string inputTensorStr = "input";
    std::string outputTensorStr = "output";

    // Define supported types.
    std::array<DataType,4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference BatchToSpaceNd: input and output types mismatched.");

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    output.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    outputTensorStr).data());

    supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
                                  reasonIfUnsupported,
                                  CreateIncorrectDimensionsErrorMsg(4,
                                                                    input.GetNumDimensions(),
                                                                    batchToSpaceNdLayerStr,
                                                                    inputTensorStr).data());

    return supported;
}

bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
                                            const TensorInfo& input1,
                                            const TensorInfo& output,
                                            const ComparisonDescriptor& descriptor,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    boost::ignore_unused(descriptor);
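    // Equal and Greater layer support (IsEqualSupported / IsGreaterSupported below) is routed
    // through this check with the corresponding ComparisonOperation.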

    std::array<DataType, 4> supportedInputTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;
    supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
                                  "Reference comparison: input 0 is not a supported type");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference comparison: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
                                  "Reference comparison: output is not of type Boolean");

    return supported;
}

bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
                                        const TensorInfo& output,
                                        const ConcatDescriptor& descriptor,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);

    bool supported = true;
    std::array<DataType,5> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS8,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference concatenation: output type not supported");
    for (const TensorInfo* input : inputs)
    {
        BOOST_ASSERT(input != nullptr);
        supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
                                      "Reference concatenation: input type not supported");

        supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
                                      "Reference concatenation: input and output types mismatched.");
    }

    return supported;
}

bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    std::array<DataType,5> supportedTypes =
    {
        DataType::Float32,
        DataType::Signed32,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                            "Reference constant: output is not a supported type.");
}

bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseInputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &FalseOutputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          input.GetDataType(),
                                          &FalseInputFuncF16<>,
                                          &TrueFunc<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>) &&
            IsSupportedForDataTypeGeneric(reasonIfUnsupported,
                                          output.GetDataType(),
                                          &TrueFunc<>,
                                          &FalseOutputFuncF32<>,
                                          &FalseFuncU8<>,
                                          &FalseFuncI32<>,
                                          &FalseFuncU8<>));
}

bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
                                               const TensorInfo& output,
                                               const Convolution2dDescriptor& descriptor,
                                               const TensorInfo& weights,
                                               const Optional<TensorInfo>& biases,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,5> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Convolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Convolution2d: input and output types mismatched.");

    const DataType inputType = input.GetDataType();
    if (inputType == DataType::QAsymmU8)
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        std::array<DataType, 3> supportedWeightTypes =
        {
            DataType::QAsymmU8,
            DataType::QSymmS8,
            DataType::QuantizedSymm8PerAxis // deprecated
        };
        ARMNN_NO_DEPRECATE_WARN_END

        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference Convolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,3> biasesSupportedTypes =
        {
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference Convolution2d: biases is not a supported type.");
    }
    ignore_unused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType, 6> supportedTypes =
    {
        DataType::Float16,
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16,
        DataType::Signed32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference for Debug layer: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference for Debug layer: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const DepthToSpaceDescriptor& descriptor,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);
    bool supported = true;

    std::array<DataType,4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthToSpace: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthToSpace: input and output types are mismatched");

    return supported;
}

bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
                                                      const TensorInfo& output,
                                                      const DepthwiseConvolution2dDescriptor& descriptor,
                                                      const TensorInfo& weights,
                                                      const Optional<TensorInfo>& biases,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference DepthwiseConvolution2d: input and output types mismatched.");

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    std::array<DataType, 3> supportedWeightTypes =
    {
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QuantizedSymm8PerAxis // deprecated
    };
    ARMNN_NO_DEPRECATE_WARN_END

    const DataType inputType = input.GetDataType();
    if (inputType == DataType::QAsymmU8)
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights type not supported for quantized input.");
    }
    else
    {
        supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: weights is not a supported type.");

        supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: input and weights types mismatched.");
    }

    if (biases.has_value())
    {
        std::array<DataType,3> biasesSupportedTypes =
        {
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };
        supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
                                      "Reference DepthwiseConvolution2d: biases is not a supported type.");
    }
    ignore_unused(descriptor);

    return supported;
}

bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
                                            const TensorInfo& output,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,3> supportedInputTypes = {
        DataType::QAsymmU8,
        DataType::QSymmS8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input type not supported.");

    supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
                                  "Reference for Dequantize layer: per-axis quantized input not supported.");

    std::array<DataType,2> supportedOutputTypes = {
        DataType::Float32,
        DataType::Float16
    };

    supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
                                  "Reference for Dequantize layer: output type not supported.");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference for Dequantize layer: input/output shapes have different num total "
                                  "elements.");

    return supported;
}

bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
                                                      const TensorInfo& scores,
                                                      const TensorInfo& anchors,
                                                      const TensorInfo& detectionBoxes,
                                                      const TensorInfo& detectionClasses,
                                                      const TensorInfo& detectionScores,
                                                      const TensorInfo& numDetections,
                                                      const DetectionPostProcessDescriptor& descriptor,
                                                      Optional<std::string&> reasonIfUnsupported) const
{
    boost::ignore_unused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);

    bool supported = true;

    std::array<DataType,3> supportedInputTypes =
    {
        DataType::Float32,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
                                  "Reference DetectionPostProcess: input 1 is not a supported type.");

    return supported;
}

bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
                                                             const TensorInfo& output,
                                                             const DepthwiseConvolution2dDescriptor& descriptor,
                                                             const TensorInfo& weights,
                                                             const Optional<TensorInfo>& biases,
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
}

bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
                                          const TensorInfo& input1,
                                          const TensorInfo& output,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    std::array<DataType,4> supportedTypes = {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 0 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
                                  "Reference division: input 1 is not a supported type.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference division: output is not a supported type.");

    supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
                                  "Reference division: input 0 and Input 1 types are mismatched");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference division: input and output types are mismatched");

    supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
                                  "Reference division: shapes are not suitable for implicit broadcast.");

    return supported;
}

bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
                                                  const TensorInfo& output,
                                                  const ElementwiseUnaryDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    boost::ignore_unused(descriptor);
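    // IsAbsSupported above delegates here with UnaryOperation::Abs; other unary operations are
    // expected to take the same path.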

    std::array<DataType, 4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    bool supported = true;

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference elementwise unary: output type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output types not matching");

    supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
                                  "Reference elementwise unary: input and output shapes "
                                  "have different number of total elements");

    return supported;
}

bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
                                       const TensorInfo& input1,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Equal),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                  const FakeQuantizationDescriptor& descriptor,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);
    bool supported = true;

    std::array<DataType,1> supportedTypes =
    {
        DataType::Float32
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference fake quantization: input type not supported.");

    return supported;
}

bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
                                       const TensorInfo& output,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(output);
    bool supported = true;

    std::array<DataType,3> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Floor: output type not supported.");

    return supported;
}

bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const TensorInfo& weights,
                                                const TensorInfo& biases,
                                                const FullyConnectedDescriptor& descriptor,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;

    // Define supported types.
    std::array<DataType,4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: input type not supported.");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: output type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
                                  "Reference Fully Connected: input and output types mismatched.");

    supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
                                  "Reference Fully Connected: weights type not supported.");

    supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
                                  "Reference Fully Connected: input and weight types mismatched.");

    if (descriptor.m_BiasEnabled)
    {
        // Defined supported types for bias
        std::array<DataType, 3> supportedBiasTypes =
        {
            DataType::Float32,
            DataType::Float16,
            DataType::Signed32
        };

        supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type not supported.");

        supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
                                      "Reference Fully Connected: bias and weight types mismatch.");

        supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
                                      "Reference Fully Connected: bias type inferred from weights is incompatible.");
    }

    return supported;
}

bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
                                        const armnn::TensorInfo& input1,
                                        const armnn::TensorInfo& output,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    bool supported = true;
    std::array<DataType,4> supportedTypes =
    {
        DataType::Float32,
        DataType::Float16,
        DataType::QAsymmU8,
        DataType::QSymmS16
    };

    supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: input type not supported");

    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
                                  "Reference Gather: output type not supported");

    supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
                                  "Reference Gather: indices (input1) type not supported");

    supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
                                  "Reference Gather: input and output types not matching");

    return supported;
}

bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
                                         const TensorInfo& input1,
                                         const TensorInfo& output,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsComparisonSupported(input0,
                                 input1,
                                 output,
                                 ComparisonDescriptor(ComparisonOperation::Greater),
                                 reasonIfUnsupported);
}

bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                       Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
                                                       const TensorInfo& output,
                                                       const InstanceNormalizationDescriptor& descriptor,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(descriptor);
    // Define supported types
    std::array<DataType, 2> supportedTypes =
917 {
918 DataType::Float32,
919 DataType::Float16
920 };
921
922 bool supported = true;
923
924 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
925 "Reference Instance Normalization: input type not supported.");
926
927 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
928 "Reference Instance Normalization: output type not supported.");
929
930 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
931 "Reference Instance Normalization: input and output types mismatched.");
932
933 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
934 "Reference Instance Normalization: input and output shapes have different "
935 "num total elements.");
936
937 return supported;
938}
939
arovir011c7c81b2018-10-08 11:34:28 +0100940bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
941 const TensorInfo& output,
942 const L2NormalizationDescriptor& descriptor,
943 Optional<std::string&> reasonIfUnsupported) const
944{
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +0100945 ignore_unused(descriptor);
Ferran Balaguerd73d14f2019-06-10 10:29:54 +0100946 // Define supported types
Matthew Jackson252df3a2019-09-11 09:19:18 +0100947 std::array<DataType, 4> supportedTypes =
Ferran Balaguerd73d14f2019-06-10 10:29:54 +0100948 {
949 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +0100950 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +0000951 DataType::QAsymmU8,
952 DataType::QSymmS16
Ferran Balaguerd73d14f2019-06-10 10:29:54 +0100953 };
954
955 bool supported = true;
956
957 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
958 "Reference L2normalization: input type not supported.");
959
960 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
961 "Reference L2normalization: output type not supported.");
962
963 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
964 "Reference L2normalization: input and output types mismatched.");
965
966 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
967 "Reference L2normalization: input and output shapes have different "
968 "num total elements.");
969
970 return supported;
arovir011c7c81b2018-10-08 11:34:28 +0100971}
972
Aron Virginas-Tare662a942019-10-14 15:12:00 +0100973bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
974 const TensorInfo& output,
975 const LogSoftmaxDescriptor& descriptor,
976 Optional<std::string&> reasonIfUnsupported) const
977{
978 ignore_unused(descriptor);
979
980 std::array<DataType, 2> supportedTypes =
981 {
982 DataType::Float32,
983 DataType::Float16
984 };
985
986 bool supported = true;
987 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
988 "Reference LogSoftmax: input type not supported");
989
990 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
991 "Reference LogSoftmax: output type not supported");
992
993 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
994 "Reference LogSoftmax: input and output types do not match");
995
996 return supported;
997}
998
arovir011c7c81b2018-10-08 11:34:28 +0100999bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
1000 const TensorInfo& outputStateIn,
1001 const TensorInfo& cellStateIn,
1002 const TensorInfo& scratchBuffer,
1003 const TensorInfo& outputStateOut,
1004 const TensorInfo& cellStateOut,
1005 const TensorInfo& output,
1006 const LstmDescriptor& descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001007 const LstmInputParamsInfo& paramsInfo,
1008 Optional<std::string&> reasonIfUnsupported) const
arovir011c7c81b2018-10-08 11:34:28 +01001009{
telsoa01c577f2c2018-08-31 09:22:23 +01001010 ignore_unused(descriptor);
Jan Eilersd01a83c2019-07-03 18:20:40 +01001011 ignore_unused(paramsInfo);
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001012
1013 bool supported = true;
1014
1015 std::array<DataType,2> supportedTypes = {
Conor Kennedyb9971c92019-05-07 07:14:23 +01001016 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001017 DataType::QSymmS16
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001018 };
1019
Jan Eilersd01a83c2019-07-03 18:20:40 +01001020 // check inputs and outputs
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001021 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1022 "Reference Lstm: input is not a supported type.");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001023 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1024 "Reference Lstm: input and outputStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001025 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1026 "Reference Lstm: input and cellStateIn types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001027 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1028 "Reference Lstm: input and scratchBuffer types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001029 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1030 "Reference Lstm: input and outputStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001031 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1032 "Reference Lstm: input and cellStateOut types are mismatched");
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001033 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1034 "Reference Lstm: input and output types are mismatched");
Jan Eilersd01a83c2019-07-03 18:20:40 +01001035 // check layer parameters
Francis Murtaghbb590b42019-08-14 09:51:36 +01001036 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001037 "Reference Lstm: input and InputToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001038 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001039 "Reference Lstm: input and InputToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001040 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001041 "Reference Lstm: input and InputToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001042 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001043 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001044 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001045 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001046 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001047 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001048 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001049 "Reference Lstm: input and ForgetGateBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001050 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001051 "Reference Lstm: input and CellBias types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001052 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001053 "Reference Lstm: input and OutputGateBias types are mismatched");
1054 if (!descriptor.m_CifgEnabled)
1055 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001056 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001057 "Reference Lstm: input and InputToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001058 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001059 reasonIfUnsupported,
1060 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001061 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001062 "Reference Lstm: input and InputGateBias types are mismatched");
1063 if (descriptor.m_PeepholeEnabled)
1064 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001065 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001066 reasonIfUnsupported,
1067 "Reference Lstm: input and CellToInputWeights types are mismatched");
1068 }
1069 }
1070 if (descriptor.m_PeepholeEnabled)
1071 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001072 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001073 "Reference Lstm: input and CellToForgetWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001074 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001075 "Reference Lstm: input and CellToOutputWeights types are mismatched");
1076 }
1077 if (descriptor.m_ProjectionEnabled)
1078 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001079 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001080 "Reference Lstm: input and mProjectionWeights types are mismatched");
1081 if (paramsInfo.m_ProjectionBias != nullptr)
1082 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001083 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
Jan Eilersd01a83c2019-07-03 18:20:40 +01001084 "Reference Lstm: input and ProjectionBias types are mismatched");
1085 }
1086 }
1087 if (descriptor.m_LayerNormEnabled)
1088 {
1089 if (!descriptor.m_CifgEnabled)
1090 {
Francis Murtaghbb590b42019-08-14 09:51:36 +01001091 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001092 reasonIfUnsupported,
1093 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1094 }
Francis Murtaghbb590b42019-08-14 09:51:36 +01001095 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001096 reasonIfUnsupported,
1097 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001098 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001099 reasonIfUnsupported,
1100 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
Francis Murtaghbb590b42019-08-14 09:51:36 +01001101 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
Jan Eilersd01a83c2019-07-03 18:20:40 +01001102 reasonIfUnsupported,
1103 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1104 }
Nattapat Chaimanowongeb2b3292019-05-07 12:02:30 +01001105
1106 return supported;
telsoa01c577f2c2018-08-31 09:22:23 +01001107}
1108
saoste012df12b32018-11-28 16:57:20 +00001109bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1110 const TensorInfo& input1,
1111 const TensorInfo& output,
1112 Optional<std::string&> reasonIfUnsupported) const
1113{
Sadik Armagan2999a022019-04-09 14:20:12 +01001114 bool supported = true;
1115
Keith Davis5204aa82020-01-27 15:24:59 +00001116 std::array<DataType,5> supportedTypes = {
Sadik Armagan2999a022019-04-09 14:20:12 +01001117 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001118 DataType::Float16,
Keith Davis5204aa82020-01-27 15:24:59 +00001119 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001120 DataType::QAsymmU8,
1121 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001122 };
1123
1124 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1125 "Reference maximum: input 0 is not a supported type.");
1126
1127 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1128 "Reference maximum: input 1 is not a supported type.");
1129
1130 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1131 "Reference maximum: output is not a supported type.");
1132
1133 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1134 "Reference maximum: input 0 and Input 1 types are mismatched");
1135
1136 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1137 "Reference maximum: input and output types are mismatched");
1138
1139 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1140 "Reference maximum: shapes are not suitable for implicit broadcast.");
1141
1142 return supported;
saoste012df12b32018-11-28 16:57:20 +00001143}
1144
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001145bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1146 const TensorInfo& output,
1147 const MeanDescriptor& descriptor,
1148 Optional<std::string&> reasonIfUnsupported) const
narpra0132b90462018-09-13 11:07:48 +01001149{
James Conroy4d1ff582019-06-10 17:06:39 +01001150 bool supported = true;
1151 std::string meanLayerStr = "Mean";
1152 std::string outputTensorStr = "output";
1153
Matthew Jackson252df3a2019-09-11 09:19:18 +01001154 std::array<DataType,4> supportedTypes =
James Conroy4d1ff582019-06-10 17:06:39 +01001155 {
1156 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001157 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001158 DataType::QAsymmU8,
1159 DataType::QSymmS16
James Conroy4d1ff582019-06-10 17:06:39 +01001160 };
1161
1162 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1163 "Reference Mean: input type not supported.");
1164
1165 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1166 "Reference Mean: input and output types are mismatched");
1167
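    // Validate the expected output rank: with KeepDims the rank is unchanged, with an empty axis
    // list every dimension is reduced (rank 1), otherwise one dimension is removed per reduced axis.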
1168 if (descriptor.m_KeepDims)
1169 {
1170 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1171 reasonIfUnsupported,
1172 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1173 output.GetNumDimensions(),
1174 meanLayerStr, outputTensorStr).data());
1175 }
1176 else if (descriptor.m_Axis.empty())
1177 {
1178 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1179 reasonIfUnsupported,
1180 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1181 meanLayerStr, outputTensorStr).data());
1182 }
1183 else
1184 {
1185 auto outputDim = input.GetNumDimensions() - boost::numeric_cast<unsigned int>(descriptor.m_Axis.size());
1186
1187 if (outputDim > 0)
1188 {
1189 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1190 reasonIfUnsupported,
1191 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1192 meanLayerStr, outputTensorStr).data());
1193 }
1194 else
1195 {
1196 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1197 reasonIfUnsupported,
1198 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1199 meanLayerStr, outputTensorStr).data());
1200 }
1201 }
1202
1203 return supported;
narpra0132b90462018-09-13 11:07:48 +01001204}
1205
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001206bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
Nikhil Raj8599a412018-11-19 14:51:07 +00001207 const TensorInfo& output,
Jim Flynne242f2d2019-05-22 14:24:13 +01001208 const MergerDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001209 Optional<std::string&> reasonIfUnsupported) const
1210{
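    // Merger is the legacy name for Concat, so the Concat support check applies unchanged.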
Jim Flynne242f2d2019-05-22 14:24:13 +01001211 return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001212}
1213
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001214bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1215 const TensorInfo &output,
1216 Optional<std::string &> reasonIfUnsupported) const
1217{
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001218 bool supported = true;
1219
1220 std::array<DataType,5> supportedTypes =
1221 {
1222 DataType::Float32,
1223 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001224 DataType::QAsymmU8,
1225 DataType::QSymmS16,
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001226 DataType::Boolean
1227 };
1228
1229 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1230 "Reference MemCopy: input type not supported");
1231
1232 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1233 "Reference MemCopy: output type not supported");
1234
1235 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1236 "Reference MemCopy: input and output types are mismatched");
1237
1238 return supported;
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001239}
1240
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001241bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1242 const TensorInfo& input1,
1243 const TensorInfo& output,
1244 Optional<std::string&> reasonIfUnsupported) const
1245{
Sadik Armagan2999a022019-04-09 14:20:12 +01001246 bool supported = true;
1247
Matthew Jackson9bff1442019-09-12 09:08:23 +01001248 std::array<DataType,4> supportedTypes = {
Sadik Armagan2999a022019-04-09 14:20:12 +01001249 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001250 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001251 DataType::QAsymmU8,
1252 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001253 };
1254
1255 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1256 "Reference minimum: input 0 is not a supported type.");
1257
1258 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1259 "Reference minimum: input 1 is not a supported type.");
1260
1261 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1262 "Reference minimum: output is not a supported type.");
1263
1264 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1265                                  "Reference minimum: input 0 and input 1 types are mismatched");
1266
1267 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1268 "Reference minimum: input and output types are mismatched");
1269
1270 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1271 "Reference minimum: shapes are not suitable for implicit broadcast.");
1272
1273 return supported;
Éanna Ó Catháin20e58802018-12-04 10:29:06 +00001274}
1275
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001276bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1277 const TensorInfo& input1,
1278 const TensorInfo& output,
1279 Optional<std::string&> reasonIfUnsupported) const
1280{
Sadik Armagan2999a022019-04-09 14:20:12 +01001281 bool supported = true;
1282
Keith Davis5204aa82020-01-27 15:24:59 +00001283 std::array<DataType,5> supportedTypes = {
Sadik Armagan2999a022019-04-09 14:20:12 +01001284 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001285 DataType::Float16,
Keith Davis5204aa82020-01-27 15:24:59 +00001286 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001287 DataType::QAsymmU8,
1288 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001289 };
1290
1291 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1292 "Reference multiplication: input 0 is not a supported type.");
1293
1294 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1295 "Reference multiplication: input 1 is not a supported type.");
1296
1297 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1298 "Reference multiplication: output is not a supported type.");
1299
1300 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1301                                  "Reference multiplication: input 0 and input 1 types are mismatched");
1302
1303 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1304 "Reference multiplication: input and output types are mismatched");
1305
1306 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1307 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1308
1309 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001310}
1311
1312bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1313 const TensorInfo& output,
1314 const NormalizationDescriptor& descriptor,
1315 Optional<std::string&> reasonIfUnsupported) const
Nina Drozd661dfa72018-10-02 11:14:17 +01001316{
Nina Drozd661dfa72018-10-02 11:14:17 +01001317 ignore_unused(descriptor);
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001318
1319 // Define supported types
Matteo Martincigh6aeb7712019-06-05 17:23:29 +01001320 std::array<DataType, 4> supportedTypes =
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001321 {
1322 DataType::Float16,
1323 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001324 DataType::QAsymmU8,
1325 DataType::QSymmS16
Matteo Martincigh2fc70c52019-06-05 14:12:48 +01001326 };
1327
1328 bool supported = true;
1329
1330 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1331 "Reference normalization: input type not supported.");
1332
1333 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1334 "Reference normalization: output type not supported.");
1335
1336 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1337 "Reference normalization: input and output shapes have different "
1338 "num total elements.");
1339
1340 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001341}
1342
Derek Lamberti901ea112019-12-10 22:07:09 +00001343bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1344 Optional<std::string&> /*reasonIfUnsupported*/) const
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001345{
Narumol Prangnawaratb6441e42019-06-04 11:22:00 +01001346 return true;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001347}
1348
1349bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1350 const TensorInfo& output,
1351 const PadDescriptor& descriptor,
1352 Optional<std::string&> reasonIfUnsupported) const
1353{
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001354 ignore_unused(descriptor);
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001355 bool supported = true;
1356
1357 // Define supported output and inputs types.
Matthew Jackson252df3a2019-09-11 09:19:18 +01001358 std::array<DataType,4> supportedTypes =
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001359 {
1360 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001361 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001362 DataType::QAsymmU8,
1363 DataType::QSymmS16
Narumol Prangnawarate6eaf662019-07-08 08:57:17 +01001364 };
1365
1366 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1367 "Reference pad: input is not a supported type.");
1368
1369 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1370 "Reference pad: output is not a supported type.");
1371
1372 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1373 "Reference pad: input and output types are mismatched.");
1374
1375 return supported;
Nina Drozd661dfa72018-10-02 11:14:17 +01001376}
1377
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001378bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1379 const TensorInfo& output,
1380 const PermuteDescriptor& descriptor,
1381 Optional<std::string&> reasonIfUnsupported) const
1382{
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001383 ignore_unused(descriptor);
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001384 bool supported = true;
1385
1386 // Define supported output and inputs types.
1387 std::array<DataType,3> supportedTypes =
1388 {
1389 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001390 DataType::QAsymmU8,
1391 DataType::QSymmS16
Narumol Prangnawarat86bb4e12019-07-08 11:36:05 +01001392 };
1393
1394 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1395 "Reference permute: input is not a supported type.");
1396
1397 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1398 "Reference permute: output is not a supported type.");
1399
1400 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1401 "Reference permute: input and output types are mismatched.");
1402
1403 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001404}
1405
1406bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1407 const TensorInfo& output,
1408 const Pooling2dDescriptor& descriptor,
1409 Optional<std::string&> reasonIfUnsupported) const
1410{
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001411 ignore_unused(descriptor);
Teresa Charlina3b20472019-06-06 11:12:32 +01001412 bool supported = true;
1413
1414 // Define supported output and inputs types.
Matthew Jackson252df3a2019-09-11 09:19:18 +01001415 std::array<DataType,4> supportedTypes =
Teresa Charlina3b20472019-06-06 11:12:32 +01001416 {
1417 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001418 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001419 DataType::QAsymmU8,
1420 DataType::QSymmS16
Teresa Charlina3b20472019-06-06 11:12:32 +01001421 };
1422
1423 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1424                                  "Reference pooling2d: input is not a supported type.");
1425
1426 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1427                                  "Reference pooling2d: output is not a supported type.");
1428
1429 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1430                                  "Reference pooling2d: input and output types are mismatched.");
1431
1432 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001433}
1434
Derek Lamberti5f400d62019-03-25 15:41:58 +00001435bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1436 const TensorInfo& output,
1437 Optional<std::string&> reasonIfUnsupported) const
1438{
1439 bool supported = true;
1440
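    // Quantize accepts float or already-quantized inputs and always produces a quantized output.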
Finn Williamsfd271062019-12-04 14:27:27 +00001441 // Define supported input types.
Keith Davis5e51cd82020-01-29 16:52:59 +00001442    std::array<DataType,4> supportedInputTypes = {
Keith Davis5e51cd82020-01-29 16:52:59 +00001444 DataType::Float32,
1445 DataType::QAsymmU8,
1446 DataType::QSymmS8,
1447 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001448 };
1449
1450 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1451 "Reference quantize: input type not supported.");
1452
1453 // Define supported output types.
Finn Williamsfd271062019-12-04 14:27:27 +00001454 std::array<DataType,3> supportedOutputTypes = {
Derek Lambertif90c56d2020-01-10 17:14:08 +00001455 DataType::QAsymmU8,
Finn Williamsfd271062019-12-04 14:27:27 +00001456 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001457 DataType::QSymmS16
Derek Lamberti5f400d62019-03-25 15:41:58 +00001458 };
1459 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1460 "Reference quantize: output type not supported.");
1461
1462 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1463 "Reference quantize: input and output shapes have different num total elements.");
1464
1465 return supported;
1466}
1467
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001468bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
Kevin Maya023c402019-12-12 17:28:05 +00001469 const TensorInfo& output,
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001470 const ReshapeDescriptor& descriptor,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001471 Optional<std::string&> reasonIfUnsupported) const
1472{
Kevin Maya023c402019-12-12 17:28:05 +00001473 ignore_unused(output);
Matteo Martincigh992d6dc2019-01-10 17:34:20 +00001474 ignore_unused(descriptor);
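    // Reshape does not alter the underlying data, so only the input type is validated here.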
Nina Drozd2f2778f2019-05-27 10:37:05 +01001475    // Define supported types.
Keith Davis5204aa82020-01-27 15:24:59 +00001476    std::array<DataType,6> supportedTypes =
Nina Drozd2f2778f2019-05-27 10:37:05 +01001477 {
1478 DataType::Float32,
1479 DataType::Float16,
Narumol Prangnawarat0718ee92019-09-13 16:53:38 +01001480 DataType::Signed32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001481 DataType::QAsymmU8,
Keith Davis5204aa82020-01-27 15:24:59 +00001482 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001483 DataType::QSymmS16
Nina Drozd2f2778f2019-05-27 10:37:05 +01001484 };
1485    return CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1486 "Reference reshape: input type not supported.");
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001487}
1488
1489bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
Sadik Armaganc625f002018-12-17 11:32:16 +00001490 const TensorInfo& output,
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001491 Optional<std::string&> reasonIfUnsupported) const
1492{
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001493 bool supported = true;
Matthew Jackson9bff1442019-09-12 09:08:23 +01001494 std::array<DataType,4> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001495 {
1496 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001497 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001498 DataType::QAsymmU8,
1499 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001500 };
Ellen Norris-Thompson3cb85f32019-06-17 11:32:49 +01001501
1502 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1503 "Reference ResizeBilinear: input type not supported");
1504
1505 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1506 "Reference ResizeBilinear: output type not supported");
1507
1508 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1509 "Reference ResizeBilinear: input and output types not matching");
1510
1511 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001512}
1513
Teresa Charlin970f43b2019-07-01 13:51:07 +01001514bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1515 const TensorInfo& output,
1516 const ResizeDescriptor& descriptor,
1517 Optional<std::string&> reasonIfUnsupported) const
1518{
Derek Lamberti901ea112019-12-10 22:07:09 +00001519 boost::ignore_unused(descriptor);
Teresa Charlin970f43b2019-07-01 13:51:07 +01001520 bool supported = true;
Keith Davis5204aa82020-01-27 15:24:59 +00001521 std::array<DataType,5> supportedTypes =
Teresa Charlin970f43b2019-07-01 13:51:07 +01001522 {
1523 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001524 DataType::Float16,
Keith Davis5204aa82020-01-27 15:24:59 +00001525 DataType::QSymmS8,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001526 DataType::QAsymmU8,
1527 DataType::QSymmS16
Teresa Charlin970f43b2019-07-01 13:51:07 +01001528 };
1529
1530 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1531 "Reference Resize: input type not supported");
1532
1533 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1534 "Reference Resize: output type not supported");
1535
1536 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1537 "Reference Resize: input and output types not matching");
1538
1539 return supported;
1540}
1541
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001542bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1543 const TensorInfo& output,
1544 Optional<std::string&> reasonIfUnsupported) const
1545{
josh minor4a3c6102020-01-06 16:40:46 -06001546 return IsElementwiseUnarySupported(input,
1547 output,
1548 ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
1549 reasonIfUnsupported);
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +00001550}
1551
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001552bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1553 const TensorInfo& output,
1554 const SliceDescriptor& descriptor,
1555 Optional<std::string&> reasonIfUnsupported) const
1556{
Derek Lamberti901ea112019-12-10 22:07:09 +00001557 boost::ignore_unused(descriptor);
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001558 bool supported = true;
1559
1560 std::array<DataType, 3> supportedTypes =
1561 {
1562 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001563 DataType::QAsymmU8,
1564 DataType::QSymmS16
Aron Virginas-Tar92b9f872019-09-17 17:27:04 +01001565 };
1566
1567 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1568 "Reference Slice: input type not supported");
1569
1570 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1571 "Reference Slice: output type not supported");
1572
1573 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1574 "Reference Slice: input and output types are mismatched");
1575
1576 return supported;
1577}
1578
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001579bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1580 const TensorInfo& output,
1581 const SoftmaxDescriptor& descriptor,
1582 Optional<std::string&> reasonIfUnsupported) const
1583{
Derek Lamberti901ea112019-12-10 22:07:09 +00001584 boost::ignore_unused(descriptor);
nikraj01248683f2019-05-29 16:46:50 +01001585 bool supported = true;
Matthew Jackson9bff1442019-09-12 09:08:23 +01001586 std::array<DataType,4> supportedTypes =
nikraj01248683f2019-05-29 16:46:50 +01001587 {
1588 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001589 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001590 DataType::QAsymmU8,
1591 DataType::QSymmS16
nikraj01248683f2019-05-29 16:46:50 +01001592 };
1593
1594 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001595                                  "Reference Softmax: input type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001596
1597 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001598                                  "Reference Softmax: output type not supported");
nikraj01248683f2019-05-29 16:46:50 +01001599
1600 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
Aron Virginas-Tare662a942019-10-14 15:12:00 +01001601                                  "Reference Softmax: input and output types are mismatched");
nikraj01248683f2019-05-29 16:46:50 +01001602
1603 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001604}
1605
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001606bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1607 const TensorInfo& output,
1608 const SpaceToBatchNdDescriptor& descriptor,
1609 Optional<std::string&> reasonIfUnsupported) const
1610{
Derek Lamberti901ea112019-12-10 22:07:09 +00001611 boost::ignore_unused(descriptor);
nikraj01120522a2019-05-31 11:33:07 +01001612 bool supported = true;
Matthew Jackson9bff1442019-09-12 09:08:23 +01001613 std::array<DataType,4> supportedTypes =
nikraj01120522a2019-05-31 11:33:07 +01001614 {
1615 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001616 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001617 DataType::QAsymmU8,
1618 DataType::QSymmS16
nikraj01120522a2019-05-31 11:33:07 +01001619 };
1620
1621 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1622 "Reference SpaceToBatchNd: input type not supported");
1623
1624 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1625 "Reference SpaceToBatchNd: output type not supported");
1626
1627 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1628 "Reference SpaceToBatchNd: input and output types are mismatched");
1629
1630 return supported;
Nattapat Chaimanowong3ea76d52018-11-09 14:10:38 +00001631}
1632
Keith Davisa57eccb2019-06-14 17:33:22 +01001633bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
Keith Davis51910332019-06-26 15:28:43 +01001634 const TensorInfo& output,
1635 const SpaceToDepthDescriptor& descriptor,
1636 Optional<std::string&> reasonIfUnsupported) const
Keith Davisa57eccb2019-06-14 17:33:22 +01001637{
1638
1639 ignore_unused(descriptor);
1640 bool supported = true;
1641
Matthew Jackson9bff1442019-09-12 09:08:23 +01001642 std::array<DataType,4> supportedTypes =
Keith Davisa57eccb2019-06-14 17:33:22 +01001643 {
1644 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001645 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001646 DataType::QAsymmU8,
1647 DataType::QSymmS16
Keith Davisa57eccb2019-06-14 17:33:22 +01001648 };
1649
1650 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1651 "Reference SpaceToDepth: input type not supported");
1652
1653 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1654 "Reference SpaceToDepth: output type not supported");
1655
1656 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1657 "Reference SpaceToDepth: input and output types are mismatched");
1658
1659 return supported;
1660}
1661
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001662bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1663 const ViewsDescriptor& descriptor,
1664 Optional<std::string&> reasonIfUnsupported) const
1665{
1666 ignore_unused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001667 bool supported = true;
Matthew Jackson9bff1442019-09-12 09:08:23 +01001668 std::array<DataType,4> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001669 {
1670 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001671 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001672 DataType::QAsymmU8,
1673 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001674 };
1675
1676 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1677 "Reference splitter: input type not supported");
1678
1679 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001680}
1681
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001682bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1683 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1684 const ViewsDescriptor& descriptor,
1685 Optional<std::string&> reasonIfUnsupported) const
1686{
1687 ignore_unused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001688 bool supported = true;
Matthew Jackson9bff1442019-09-12 09:08:23 +01001689 std::array<DataType,4> supportedTypes =
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001690 {
1691 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001692 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001693 DataType::QAsymmU8,
1694 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001695 };
1696
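    // The input must be a supported type and every output view must have the same type as the input.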
1697    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1698                                  "Reference splitter: input type not supported");
1699    for (const TensorInfo& output : outputs)
1700    {
1701        supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1702                                      "Reference splitter: output type not supported");
1703
1704 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1705 "Reference splitter: input and output types mismatched.");
1706 }
1707
1708 return supported;
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001709}
1710
Matthew Jackson81e601c2019-07-11 12:07:09 +01001711bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1712 const TensorInfo& output,
1713 const StackDescriptor& descriptor,
1714 Optional<std::string&> reasonIfUnsupported) const
1715{
1716 ignore_unused(descriptor);
1717
1718 bool supported = true;
Matthew Jacksone69c3992019-09-09 14:31:21 +01001719 std::array<DataType,4> supportedTypes =
Matthew Jackson81e601c2019-07-11 12:07:09 +01001720 {
1721 DataType::Float32,
Matthew Jacksone69c3992019-09-09 14:31:21 +01001722 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001723 DataType::QAsymmU8,
1724 DataType::QSymmS16
Matthew Jackson81e601c2019-07-11 12:07:09 +01001725 };
1726
1727 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1728 "Reference stack: output type not supported");
1729 for (const TensorInfo* input : inputs)
1730 {
1731 BOOST_ASSERT(input != nullptr);
1732 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
1733 "Reference stack: input type not supported");
1734
1735 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
1736 "Reference stack: input and output types mismatched.");
1737 }
1738
1739 return supported;
1740}
1741
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00001742bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
1743 const TensorInfo& output,
1744 const StridedSliceDescriptor& descriptor,
1745 Optional<std::string&> reasonIfUnsupported) const
1746{
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00001747 ignore_unused(descriptor);
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001748 bool supported = true;
1749
1750 std::array<DataType,3> supportedTypes =
1751 {
1752 DataType::Float32,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001753 DataType::QAsymmU8,
1754 DataType::QSymmS16
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +01001755 };
1756
1757 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1758 "Reference StridedSlice: input type not supported");
1759
1760 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1761 "Reference StridedSlice: output type not supported");
1762
1763 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1764 "Reference StridedSlice: input and output types are mismatched");
1765
1766 return supported;
Nattapat Chaimanowong1216b582018-11-23 15:33:41 +00001767}
1768
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001769bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
1770 const TensorInfo& input1,
1771 const TensorInfo& output,
1772 Optional<std::string&> reasonIfUnsupported) const
1773{
Sadik Armagan2999a022019-04-09 14:20:12 +01001774 bool supported = true;
1775
Matthew Jackson9bff1442019-09-12 09:08:23 +01001776 std::array<DataType,4> supportedTypes = {
Sadik Armagan2999a022019-04-09 14:20:12 +01001777 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001778 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001779 DataType::QAsymmU8,
1780 DataType::QSymmS16
Sadik Armagan2999a022019-04-09 14:20:12 +01001781 };
1782
1783 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1784 "Reference subtraction: input 0 is not a supported type.");
1785
1786 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1787 "Reference subtraction: input 1 is not a supported type.");
1788
1789 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1790 "Reference subtraction: output is not a supported type.");
1791
1792 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1793                                  "Reference subtraction: input 0 and input 1 types are mismatched");
1794
1795 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1796 "Reference subtraction: input and output types are mismatched");
1797
1798 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1799 "Reference subtraction: shapes are not suitable for implicit broadcast.");
1800
1801 return supported;
Aron Virginas-Tarb5acbb72018-10-15 11:11:51 +01001802}
1803
Matteo Martincighab9e5252019-06-13 17:27:46 +01001804bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
1805 const TensorInfo& alpha,
1806 const TensorInfo& output,
1807 Optional<std::string&> reasonIfUnsupported) const
1808{
1809 bool supported = true;
1810
Matthew Jackson9bff1442019-09-12 09:08:23 +01001811 std::array<DataType, 4> supportedTypes
Matteo Martincighab9e5252019-06-13 17:27:46 +01001812 {
1813 DataType::Float32,
Matthew Jackson9bff1442019-09-12 09:08:23 +01001814 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001815 DataType::QAsymmU8,
1816 DataType::QSymmS16
Matteo Martincighab9e5252019-06-13 17:27:46 +01001817 };
1818
1819 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1820 "PReLU: input is not a supported type.");
1821
1822 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
1823 "PReLU: alpha is not a supported type.");
1824
1825 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1826 "PReLU: output is not a supported type.");
1827
1828 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
1829 "PReLU: input, alpha and output types are mismatched");
1830
1831 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
1832 "PReLU: shapes are not suitable for implicit broadcast");
1833
1834 return supported;
1835}
1836
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001837bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
1838 const TensorInfo& output,
1839 const TransposeConvolution2dDescriptor& descriptor,
1840 const TensorInfo& weights,
1841 const Optional<TensorInfo>& biases,
1842 Optional<std::string&> reasonIfUnsupported) const
1843{
Derek Lamberti901ea112019-12-10 22:07:09 +00001844 boost::ignore_unused(descriptor);
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001845 bool supported = true;
1846
Matthew Jackson252df3a2019-09-11 09:19:18 +01001847 std::array<DataType,4> supportedTypes =
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001848 {
1849 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001850 DataType::Float16,
Derek Lambertif90c56d2020-01-10 17:14:08 +00001851 DataType::QAsymmU8,
1852 DataType::QSymmS16
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001853 };
1854
1855 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1856 "Reference TransposeConvolution2d: input is not a supported type.");
1857
1858 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1859 "Reference TransposeConvolution2d: output is not a supported type.");
1860
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001861 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1862 "Reference TransposeConvolution2d: input and output types mismatched.");
1863
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00001864
1865 const DataType inputType = input.GetDataType();
Derek Lambertif90c56d2020-01-10 17:14:08 +00001866 if (inputType == DataType::QAsymmU8)
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00001867 {
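        // For quantized inputs, weights may be per-tensor quantized (QAsymmU8 or QSymmS8) or use the deprecated per-axis QuantizedSymm8PerAxis type.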
Derek Lambertid466a542020-01-22 15:37:29 +00001868 ARMNN_NO_DEPRECATE_WARN_BEGIN
1869 std::array<DataType, 3> supportedWeightTypes =
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00001870 {
Derek Lambertif90c56d2020-01-10 17:14:08 +00001871 DataType::QAsymmU8,
Derek Lambertid466a542020-01-22 15:37:29 +00001872 DataType::QSymmS8,
1873 DataType::QuantizedSymm8PerAxis //Deprecated
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00001874 };
Derek Lambertid466a542020-01-22 15:37:29 +00001875 ARMNN_NO_DEPRECATE_WARN_END
Aron Virginas-Tar94d3b932019-11-11 12:54:47 +00001876
1877 supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
1878 "Reference TransposeConvolution2d: weights type not supported for "
1879 "quantized input.");
1880 }
1881 else
1882 {
1883 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
1884 "Reference TransposeConvolution2d: weights is not a supported type.");
1885
1886 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
1887 "Reference TransposeConvolution2d: input and weights types mismatched.");
1888 }
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001889
1890 if (biases.has_value())
1891 {
Matthew Jackson252df3a2019-09-11 09:19:18 +01001892 std::array<DataType,3> biasesSupportedTypes =
Aron Virginas-Tar651aafe2019-08-05 11:52:05 +01001893 {
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001894 DataType::Float32,
Matthew Jackson252df3a2019-09-11 09:19:18 +01001895 DataType::Float16,
Aron Virginas-Tar98180ef2019-06-26 15:02:47 +01001896 DataType::Signed32
1897 };
1898 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
1899 "Reference TransposeConvolution2d: biases is not a supported type.");
1900 }
1901
1902 return supported;
1903}
1904
arovir011c7c81b2018-10-08 11:34:28 +01001905} // namespace armnn